From 766b2bf5a6d96832ae7fb66714245db8d3288690 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 24 May 2023 22:48:36 +0000 Subject: [PATCH 1/4] feat: Added client library support for ModerateText in the Natural Language API (V1Beta2) PiperOrigin-RevId: 534975458 Source-Link: https://github.com/googleapis/googleapis/commit/392db1d5b03abe71647a69b28ea84b212a9978a9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/df98b94b1218af16109fe3e1aef329732779c0a9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGY5OGI5NGIxMjE4YWYxNjEwOWZlM2UxYWVmMzI5NzMyNzc5YzBhOSJ9 --- owl-bot-staging/v1/.coveragerc | 13 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + owl-bot-staging/v1/docs/conf.py | 376 ++ owl-bot-staging/v1/docs/index.rst | 7 + .../v1/docs/language_v1/language_service.rst | 6 + .../v1/docs/language_v1/services.rst | 6 + owl-bot-staging/v1/docs/language_v1/types.rst | 6 + .../v1/google/cloud/language/__init__.py | 75 + .../v1/google/cloud/language/gapic_version.py | 16 + .../v1/google/cloud/language/py.typed | 2 + .../v1/google/cloud/language_v1/__init__.py | 76 + .../cloud/language_v1/gapic_metadata.json | 118 + .../google/cloud/language_v1/gapic_version.py | 16 + .../v1/google/cloud/language_v1/py.typed | 2 + .../cloud/language_v1/services/__init__.py | 15 + .../services/language_service/__init__.py | 22 + .../services/language_service/async_client.py | 867 ++++ .../services/language_service/client.py | 1020 +++++ .../language_service/transports/__init__.py | 38 + .../language_service/transports/base.py | 261 ++ .../language_service/transports/grpc.py | 405 ++ .../transports/grpc_asyncio.py | 404 ++ .../language_service/transports/rest.py | 907 ++++ .../cloud/language_v1/types/__init__.py | 68 + .../language_v1/types/language_service.py | 1677 +++++++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 + ...language_service_analyze_entities_async.py | 55 + ..._language_service_analyze_entities_sync.py | 55 + ..._service_analyze_entity_sentiment_async.py | 55 + ...e_service_analyze_entity_sentiment_sync.py | 55 + ...anguage_service_analyze_sentiment_async.py | 55 + ...language_service_analyze_sentiment_sync.py | 55 + ...d_language_service_analyze_syntax_async.py | 55 + ...ed_language_service_analyze_syntax_sync.py | 55 + ...ed_language_service_annotate_text_async.py | 55 + ...ted_language_service_annotate_text_sync.py | 55 + ...ed_language_service_classify_text_async.py | 55 + ...ted_language_service_classify_text_sync.py | 55 + ...pet_metadata_google.cloud.language.v1.json | 1029 +++++ .../v1/scripts/fixup_language_v1_keywords.py | 181 + owl-bot-staging/v1/setup.py | 90 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 9 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/language_v1/__init__.py | 16 + .../language_v1/test_language_service.py | 3674 +++++++++++++++ owl-bot-staging/v1beta2/.coveragerc | 13 + owl-bot-staging/v1beta2/.flake8 | 33 + owl-bot-staging/v1beta2/MANIFEST.in | 2 + owl-bot-staging/v1beta2/README.rst | 49 + owl-bot-staging/v1beta2/docs/conf.py | 376 ++ owl-bot-staging/v1beta2/docs/index.rst | 7 + .../language_v1beta2/language_service.rst | 6 + 
.../docs/language_v1beta2/services.rst | 6 + .../v1beta2/docs/language_v1beta2/types.rst | 6 + .../v1beta2/google/cloud/language/__init__.py | 79 + .../google/cloud/language/gapic_version.py | 16 + .../v1beta2/google/cloud/language/py.typed | 2 + .../google/cloud/language_v1beta2/__init__.py | 80 + .../language_v1beta2/gapic_metadata.json | 133 + .../cloud/language_v1beta2/gapic_version.py | 16 + .../google/cloud/language_v1beta2/py.typed | 2 + .../language_v1beta2/services/__init__.py | 15 + .../services/language_service/__init__.py | 22 + .../services/language_service/async_client.py | 963 ++++ .../services/language_service/client.py | 1116 +++++ .../language_service/transports/__init__.py | 38 + .../language_service/transports/base.py | 275 ++ .../language_service/transports/grpc.py | 432 ++ .../transports/grpc_asyncio.py | 431 ++ .../language_service/transports/rest.py | 1029 +++++ .../cloud/language_v1beta2/types/__init__.py | 72 + .../types/language_service.py | 1761 +++++++ owl-bot-staging/v1beta2/mypy.ini | 3 + owl-bot-staging/v1beta2/noxfile.py | 184 + ...language_service_analyze_entities_async.py | 55 + ..._language_service_analyze_entities_sync.py | 55 + ..._service_analyze_entity_sentiment_async.py | 55 + ...e_service_analyze_entity_sentiment_sync.py | 55 + ...anguage_service_analyze_sentiment_async.py | 55 + ...language_service_analyze_sentiment_sync.py | 55 + ...d_language_service_analyze_syntax_async.py | 55 + ...ed_language_service_analyze_syntax_sync.py | 55 + ...ed_language_service_annotate_text_async.py | 55 + ...ted_language_service_annotate_text_sync.py | 55 + ...ed_language_service_classify_text_async.py | 55 + ...ted_language_service_classify_text_sync.py | 55 + ...ed_language_service_moderate_text_async.py | 55 + ...ted_language_service_moderate_text_sync.py | 55 + ...etadata_google.cloud.language.v1beta2.json | 1190 +++++ .../fixup_language_v1beta2_keywords.py | 182 + owl-bot-staging/v1beta2/setup.py | 90 + .../v1beta2/testing/constraints-3.10.txt | 6 + .../v1beta2/testing/constraints-3.11.txt | 6 + .../v1beta2/testing/constraints-3.12.txt | 6 + .../v1beta2/testing/constraints-3.7.txt | 9 + .../v1beta2/testing/constraints-3.8.txt | 6 + .../v1beta2/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1beta2/tests/__init__.py | 16 + .../v1beta2/tests/unit/__init__.py | 16 + .../v1beta2/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/language_v1beta2/__init__.py | 16 + .../language_v1beta2/test_language_service.py | 4070 +++++++++++++++++ 112 files changed, 25983 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/language_service.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/types.rst create mode 100644 owl-bot-staging/v1/google/cloud/language/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/language/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py create mode 100644 
owl-bot-staging/v1/google/cloud/language_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json create mode 100644 owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py 
create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py create mode 100644 owl-bot-staging/v1beta2/.coveragerc create mode 100644 owl-bot-staging/v1beta2/.flake8 create mode 100644 owl-bot-staging/v1beta2/MANIFEST.in create mode 100644 owl-bot-staging/v1beta2/README.rst create mode 100644 owl-bot-staging/v1beta2/docs/conf.py create mode 100644 owl-bot-staging/v1beta2/docs/index.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/py.typed create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py create mode 100644 owl-bot-staging/v1beta2/mypy.ini create mode 100644 owl-bot-staging/v1beta2/noxfile.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py create mode 100644 
owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json create mode 100644 owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py create mode 100644 owl-bot-staging/v1beta2/setup.py create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1beta2/tests/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 00000000..c1f51536 --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/language/__init__.py + google/cloud/language/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. 
DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 00000000..e0f21a43 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/language *.py +recursive-include google/cloud/language_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 00000000..0c5f1b6b --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Language API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Language API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 00000000..2e1b322d --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-language documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here.
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffixes as a list of strings: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-language" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting acts as a replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-language-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-language.tex", + u"google-cloud-language Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-language", + u"Google Cloud Language Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-language", + u"google-cloud-language Documentation", + author, + "google-cloud-language", + "GAPIC library for Google Cloud Language API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 00000000..90928956 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1/services + language_v1/types diff --git a/owl-bot-staging/v1/docs/language_v1/language_service.rst b/owl-bot-staging/v1/docs/language_v1/language_service.rst new file mode 100644 index 00000000..96e8755a --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/language_service.rst @@ -0,0 +1,6 @@ +LanguageService +--------------------------------- + +.. automodule:: google.cloud.language_v1.services.language_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/language_v1/services.rst b/owl-bot-staging/v1/docs/language_v1/services.rst new file mode 100644 index 00000000..26f74fe9 --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Language v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + language_service diff --git a/owl-bot-staging/v1/docs/language_v1/types.rst b/owl-bot-staging/v1/docs/language_v1/types.rst new file mode 100644 index 00000000..5dd3769e --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Language v1 API +====================================== + +.. automodule:: google.cloud.language_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/language/__init__.py b/owl-bot-staging/v1/google/cloud/language/__init__.py new file mode 100644 index 00000000..ff0761f6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/__init__.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.language import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.language_v1.services.language_service.client import LanguageServiceClient +from google.cloud.language_v1.services.language_service.async_client import LanguageServiceAsyncClient + +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesRequest +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesResponse +from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentRequest +from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentResponse +from google.cloud.language_v1.types.language_service import AnalyzeSentimentRequest +from google.cloud.language_v1.types.language_service import AnalyzeSentimentResponse +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxRequest +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxResponse +from google.cloud.language_v1.types.language_service import AnnotateTextRequest +from google.cloud.language_v1.types.language_service import AnnotateTextResponse +from google.cloud.language_v1.types.language_service import ClassificationCategory +from google.cloud.language_v1.types.language_service import ClassificationModelOptions +from google.cloud.language_v1.types.language_service import ClassifyTextRequest +from google.cloud.language_v1.types.language_service import ClassifyTextResponse +from google.cloud.language_v1.types.language_service import DependencyEdge +from google.cloud.language_v1.types.language_service import Document +from google.cloud.language_v1.types.language_service import Entity +from google.cloud.language_v1.types.language_service import EntityMention +from google.cloud.language_v1.types.language_service import PartOfSpeech +from google.cloud.language_v1.types.language_service import Sentence +from google.cloud.language_v1.types.language_service import Sentiment +from google.cloud.language_v1.types.language_service import TextSpan +from google.cloud.language_v1.types.language_service import Token +from google.cloud.language_v1.types.language_service import EncodingType + +__all__ = ('LanguageServiceClient', + 'LanguageServiceAsyncClient', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1/google/cloud/language/gapic_version.py b/owl-bot-staging/v1/google/cloud/language/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language/py.typed b/owl-bot-staging/v1/google/cloud/language/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py new file mode 100644 index 00000000..d2eb7660 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.language_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.language_service import LanguageServiceClient +from .services.language_service import LanguageServiceAsyncClient + +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassificationModelOptions +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token +from .types.language_service import EncodingType + +__all__ = ( + 'LanguageServiceAsyncClient', +'AnalyzeEntitiesRequest', +'AnalyzeEntitiesResponse', +'AnalyzeEntitySentimentRequest', +'AnalyzeEntitySentimentResponse', +'AnalyzeSentimentRequest', +'AnalyzeSentimentResponse', +'AnalyzeSyntaxRequest', +'AnalyzeSyntaxResponse', 
+'AnnotateTextRequest', +'AnnotateTextResponse', +'ClassificationCategory', +'ClassificationModelOptions', +'ClassifyTextRequest', +'ClassifyTextResponse', +'DependencyEdge', +'Document', +'EncodingType', +'Entity', +'EntityMention', +'LanguageServiceClient', +'PartOfSpeech', +'Sentence', +'Sentiment', +'TextSpan', +'Token', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json new file mode 100644 index 00000000..e475aad9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json @@ -0,0 +1,118 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.language_v1", + "protoPackage": "google.cloud.language.v1", + "schema": "1.0", + "services": { + "LanguageService": { + "clients": { + "grpc": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LanguageServiceAsyncClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + } + } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/py.typed b/owl-bot-staging/v1/google/cloud/language_v1/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py new file mode 100644 index 00000000..6e5f9052 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient + +__all__ = ( + 'LanguageServiceClient', + 'LanguageServiceAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py new file mode 100644 index 00000000..565636d9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py @@ -0,0 +1,867 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.language_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.language_v1.types import language_service +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. + """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) + common_project_path = staticmethod(LanguageServiceClient.common_project_path) + parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) + common_location_path = staticmethod(LanguageServiceClient.common_location_path) + parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. 
+ """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]]): + The request object. The sentiment analysis request + message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate sentence offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnalyzeSentimentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.analyze_sentiment,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def analyze_entities(self,
+            request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None,
+            *,
+            document: Optional[language_service.Document] = None,
+            encoding_type: Optional[language_service.EncodingType] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> language_service.AnalyzeEntitiesResponse:
+        r"""Finds named entities (currently proper names and
+        common nouns) in the text along with entity types,
+        salience, mentions for each entity, and other
+        properties.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import language_v1
+
+            async def sample_analyze_entities():
+                # Create a client
+                client = language_v1.LanguageServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = language_v1.Document()
+                document.content = "content_value"
+
+                request = language_v1.AnalyzeEntitiesRequest(
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.analyze_entities(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]]):
+                The request object. The entity analysis request message.
+            document (:class:`google.cloud.language_v1.types.Document`):
+                Required. Input document.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
+                The encoding type used by the API to
+                calculate offsets.
+
+                This corresponds to the ``encoding_type`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1.types.AnalyzeEntitiesResponse:
+                The entity analysis response message.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnalyzeEntitiesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.analyze_entities,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def analyze_entity_sentiment(self,
+            request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None,
+            *,
+            document: Optional[language_service.Document] = None,
+            encoding_type: Optional[language_service.EncodingType] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> language_service.AnalyzeEntitySentimentResponse:
+        r"""Finds entities, similar to
+        [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities]
+        in the text and analyzes sentiment associated with each entity
+        and its mentions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import language_v1
+
+            async def sample_analyze_entity_sentiment():
+                # Create a client
+                client = language_v1.LanguageServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = language_v1.Document()
+                document.content = "content_value"
+
+                request = language_v1.AnalyzeEntitySentimentRequest(
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.analyze_entity_sentiment(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]]):
+                The request object. The entity-level sentiment analysis
+                request message.
+            document (:class:`google.cloud.language_v1.types.Document`):
+                Required. Input document.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
+                The encoding type used by the API to
+                calculate offsets.
+
+                This corresponds to the ``encoding_type`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1.types.AnalyzeEntitySentimentResponse:
+                The entity-level sentiment analysis
+                response message.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnalyzeEntitySentimentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.analyze_entity_sentiment,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def analyze_syntax(self,
+            request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None,
+            *,
+            document: Optional[language_service.Document] = None,
+            encoding_type: Optional[language_service.EncodingType] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> language_service.AnalyzeSyntaxResponse:
+        r"""Analyzes the syntax of the text and provides sentence
+        boundaries and tokenization along with part of speech
+        tags, dependency trees, and other properties.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import language_v1
+
+            async def sample_analyze_syntax():
+                # Create a client
+                client = language_v1.LanguageServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = language_v1.Document()
+                document.content = "content_value"
+
+                request = language_v1.AnalyzeSyntaxRequest(
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.analyze_syntax(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]]):
+                The request object. The syntax analysis request message.
+            document (:class:`google.cloud.language_v1.types.Document`):
+                Required. Input document.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
+                The encoding type used by the API to
+                calculate offsets.
+
+                This corresponds to the ``encoding_type`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1.types.AnalyzeSyntaxResponse:
+                The syntax analysis response message.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnalyzeSyntaxRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.analyze_syntax,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def classify_text(self,
+            request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None,
+            *,
+            document: Optional[language_service.Document] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> language_service.ClassifyTextResponse:
+        r"""Classifies a document into categories.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import language_v1
+
+            async def sample_classify_text():
+                # Create a client
+                client = language_v1.LanguageServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = language_v1.Document()
+                document.content = "content_value"
+
+                request = language_v1.ClassifyTextRequest(
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.classify_text(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]]):
+                The request object. The document classification request
+                message.
+            document (:class:`google.cloud.language_v1.types.Document`):
+                Required. Input document.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1.types.ClassifyTextResponse:
+                The document classification response
+                message.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.ClassifyTextRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.classify_text,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def annotate_text(self,
+            request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None,
+            *,
+            document: Optional[language_service.Document] = None,
+            features: Optional[language_service.AnnotateTextRequest.Features] = None,
+            encoding_type: Optional[language_service.EncodingType] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> language_service.AnnotateTextResponse:
+        r"""A convenience method that provides all the features
+        that analyzeSentiment, analyzeEntities, and
+        analyzeSyntax provide in one call.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import language_v1
+
+            async def sample_annotate_text():
+                # Create a client
+                client = language_v1.LanguageServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = language_v1.Document()
+                document.content = "content_value"
+
+                request = language_v1.AnnotateTextRequest(
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.annotate_text(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]]):
+                The request object. The request message for the text
+                annotation API, which can perform
+                multiple analysis types (sentiment,
+                entities, and syntax) in one call.
+            document (:class:`google.cloud.language_v1.types.Document`):
+                Required. Input document.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            features (:class:`google.cloud.language_v1.types.AnnotateTextRequest.Features`):
+                Required. The enabled features.
+                This corresponds to the ``features`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
+                The encoding type used by the API to
+                calculate offsets.
+
+                This corresponds to the ``encoding_type`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1.types.AnnotateTextResponse:
+                The text annotations response
+                message.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, features, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnnotateTextRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if features is not None:
+            request.features = features
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.annotate_text,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
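+        # (The AnnotateTextResponse bundles the per-feature results: sentiment,
+        # entities, syntax, and categories, depending on the requested ``features``.)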
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py new file mode 100644 index 00000000..831092ef --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py @@ -0,0 +1,1020 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.language_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.language_v1.types import language_service +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import LanguageServiceGrpcTransport +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .transports.rest import LanguageServiceRestTransport + + +class LanguageServiceClientMeta(type): + """Metaclass for the LanguageService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] + _transport_registry["grpc"] = LanguageServiceGrpcTransport + _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport + _transport_registry["rest"] = LanguageServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[LanguageServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
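+        # (Valid labels are the registry keys: "grpc", "grpc_asyncio", and "rest".)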
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "language.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LanguageServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LanguageServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> LanguageServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            LanguageServiceTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, LanguageServiceTransport]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the language service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, LanguageServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]): + The request object. The sentiment analysis request + message. 
+ document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]): + The request object. The entity analysis request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]): + The request object. The entity-level sentiment analysis + request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
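+        # (Each flattened argument is simply copied onto the matching field of
+        # the coerced request object.)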
+ if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]): + The request object. The syntax analysis request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
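+        # (A dict passed as ``request`` is coerced here; a ready-made
+        # AnalyzeSyntaxRequest instance passes through unchanged.)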
+ if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]): + The request object. The document classification request + message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
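+        # (``document`` is the only flattened field classify_text accepts.)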
+ if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (google.cloud.language_v1.types.AnnotateTextRequest.Features): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
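+        # (Here the flattened arguments are ``document``, ``features``, and
+        # ``encoding_type``; mixing any of them with ``request`` raises below.)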
+ has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LanguageServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceClient", +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py new file mode 100644 index 00000000..3cb6ab92 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestTransport +from .rest import LanguageServiceRestInterceptor + + +# Compile a registry of transports. 
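+# The registry keys mirror the ``transport`` argument accepted by the client;
+# e.g. (illustrative) ``LanguageServiceClient(transport="rest")`` resolves to
+# ``_transport_registry["rest"]``, with "grpc" used when nothing is specified.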
+_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] +_transport_registry['grpc'] = LanguageServiceGrpcTransport +_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport +_transport_registry['rest'] = LanguageServiceRestTransport + +__all__ = ( + 'LanguageServiceTransport', + 'LanguageServiceGrpcTransport', + 'LanguageServiceGrpcAsyncIOTransport', + 'LanguageServiceRestTransport', + 'LanguageServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py new file mode 100644 index 00000000..99429175 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.language_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1.types import language_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LanguageServiceTransport(abc.ABC): + """Abstract transport class for LanguageService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'language.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
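+        # Every RPC below shares the same policy: DeadlineExceeded and
+        # ServiceUnavailable are retried with exponential backoff (nominal
+        # delays 0.1s, 0.13s, 0.169s, ... capped at 60s; api-core applies
+        # jitter) until the overall 600s deadline or timeout is exhausted.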
+        self._wrapped_methods = {
+            self.analyze_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entities: gapic_v1.method.wrap_method(
+                self.analyze_entities,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_entity_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_syntax: gapic_v1.method.wrap_method(
+                self.analyze_syntax,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.classify_text: gapic_v1.method.wrap_method(
+                self.classify_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.annotate_text: gapic_v1.method.wrap_method(
+                self.annotate_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+             Only call this method if the transport is NOT shared
+             with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Union[ + language_service.AnalyzeSentimentResponse, + Awaitable[language_service.AnalyzeSentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Union[ + language_service.AnalyzeEntitiesResponse, + Awaitable[language_service.AnalyzeEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Union[ + language_service.AnalyzeEntitySentimentResponse, + Awaitable[language_service.AnalyzeEntitySentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Union[ + language_service.AnalyzeSyntaxResponse, + Awaitable[language_service.AnalyzeSyntaxResponse] + ]]: + raise NotImplementedError() + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Union[ + language_service.ClassifyTextResponse, + Awaitable[language_service.ClassifyTextResponse] + ]]: + raise NotImplementedError() + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Union[ + language_service.AnnotateTextResponse, + Awaitable[language_service.AnnotateTextResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'LanguageServiceTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py new file mode 100644 index 00000000..49ab72e3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
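+
+        A minimal construction sketch (illustrative; assumes application
+        default credentials are available)::
+
+            transport = LanguageServiceGrpcTransport()
+            client = LanguageServiceClient(transport=transport)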
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + ~.AnalyzeSentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
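+        # The method path follows gRPC's '/<package>.<Service>/<Method>'
+        # convention, yielding
+        # '/google.cloud.language.v1.LanguageService/AnnotateText' here.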
+ if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'LanguageServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..a95493e5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.language_v1.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import LanguageServiceGrpcTransport + + +class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): + """gRPC AsyncIO backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Awaitable[language_service.AnalyzeSentimentResponse]]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + Awaitable[~.AnalyzeSentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Awaitable[language_service.AnalyzeEntitiesResponse]]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + Awaitable[~.AnalyzeEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Awaitable[language_service.AnalyzeEntitySentimentResponse]]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + Awaitable[~.AnalyzeEntitySentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse]]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
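+        # Unlike the sync transport, the cached callable returns an
+        # awaitable; a caller would do e.g. (illustrative):
+        #
+        #     response = await transport.analyze_syntax(request)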
+ if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse]]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse]]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'LanguageServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py new file mode 100644 index 00000000..d4d50a2e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py @@ -0,0 +1,907 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.language_v1.types import language_service + +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LanguageServiceRestInterceptor: + """Interceptor for LanguageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LanguageServiceRestTransport. + + .. 
code-block:: python + class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): + def pre_analyze_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_entity_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entity_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_syntax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_syntax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_annotate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_classify_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_classify_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) + client = LanguageServiceClient(transport=transport) + + + """ + def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. 
+ """ + return response + def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
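+        # Below, the endpoint is normalized first: a bare host gets the
+        # ``url_scheme`` prefix, while a host that already carries a scheme is
+        # kept as-is; e.g. (illustrative) host='localhost:7469' with
+        # url_scheme='http' becomes 'http://localhost:7469'.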
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LanguageServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AnalyzeEntities(LanguageServiceRestStub):
+        def __hash__(self):
+            return hash("AnalyzeEntities")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: language_service.AnalyzeEntitiesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> language_service.AnalyzeEntitiesResponse:
+            r"""Call the analyze entities method over HTTP.
+
+            Args:
+                request (~.language_service.AnalyzeEntitiesRequest):
+                    The request object. The entity analysis request message.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.language_service.AnalyzeEntitiesResponse:
+                    The entity analysis response message.
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/documents:analyzeEntities',
+                'body': '*',
+            },
+            ]
+            request, metadata = self._interceptor.pre_analyze_entities(request, metadata)
+            pb_request = language_service.AnalyzeEntitiesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
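+            # Illustrative mapping: a 404 surfaces as
+            # google.api_core.exceptions.NotFound and a 429 as
+            # google.api_core.exceptions.TooManyRequests.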
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeEntitySentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeEntitySentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeSentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSyntaxRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeSyntax', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnnotateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:annotateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ClassifyTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:classifyText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LanguageServiceRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py new file mode 100644 index 00000000..1b87226f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .language_service import ( + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + AnnotateTextRequest, + AnnotateTextResponse, + ClassificationCategory, + ClassificationModelOptions, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, + EncodingType, +) + +__all__ = ( + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py new file mode 100644 index 00000000..69ce0bc3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py @@ -0,0 +1,1677 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.language.v1', + manifest={ + 'EncodingType', + 'Document', + 'Sentence', + 'Entity', + 'Token', + 'Sentiment', + 'PartOfSpeech', + 'DependencyEdge', + 'EntityMention', + 'TextSpan', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + }, +) + + +class EncodingType(proto.Enum): + r"""Represents the text encoding that the caller uses to process the + output. Providing an ``EncodingType`` is recommended because the API + provides the beginning offsets for various outputs, such as tokens + and mentions, and languages that natively use different text + encodings may access offsets differently. + + Values: + NONE (0): + If ``EncodingType`` is not specified, encoding-dependent + information (such as ``begin_offset``) will be set at + ``-1``. + UTF8 (1): + Encoding-dependent information (such as ``begin_offset``) is + calculated based on the UTF-8 encoding of the input. C++ and + Go are examples of languages that use this encoding + natively. 
+        UTF16 (2):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-16 encoding of the input. Java
+            and JavaScript are examples of languages that use this
+            encoding natively.
+        UTF32 (3):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-32 encoding of the input. Python
+            is an example of a language that uses this encoding
+            natively.
+    """
+    NONE = 0
+    UTF8 = 1
+    UTF16 = 2
+    UTF32 = 3
+
+
+class Document(proto.Message):
+    r"""Represents the input to API methods.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        type_ (google.cloud.language_v1.types.Document.Type):
+            Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+            returns an ``INVALID_ARGUMENT`` error.
+        content (str):
+            The content of the input in string format.
+            Cloud audit logging exempt since it is based on
+            user data.
+
+            This field is a member of `oneof`_ ``source``.
+        gcs_content_uri (str):
+            The Google Cloud Storage URI where the file content is
+            located. This URI must be of the form:
+            gs://bucket_name/object_name. For more details, see
+            https://cloud.google.com/storage/docs/reference-uris. NOTE:
+            Cloud Storage object versioning is not supported.
+
+            This field is a member of `oneof`_ ``source``.
+        language (str):
+            The language of the document (if not specified, the language
+            is automatically detected). Both ISO and BCP-47 language
+            codes are accepted. `Language
+            Support <https://cloud.google.com/natural-language/docs/languages>`__
+            lists currently supported languages for each API method. If
+            the language (either specified by the caller or
+            automatically detected) is not supported by the called API
+            method, an ``INVALID_ARGUMENT`` error is returned.
+    """
+    class Type(proto.Enum):
+        r"""The document types enum.
+
+        Values:
+            TYPE_UNSPECIFIED (0):
+                The content type is not specified.
+            PLAIN_TEXT (1):
+                Plain text
+            HTML (2):
+                HTML
+        """
+        TYPE_UNSPECIFIED = 0
+        PLAIN_TEXT = 1
+        HTML = 2
+
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Type,
+    )
+    content: str = proto.Field(
+        proto.STRING,
+        number=2,
+        oneof='source',
+    )
+    gcs_content_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='source',
+    )
+    language: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class Sentence(proto.Message):
+    r"""Represents a sentence in the input document.
+
+    Attributes:
+        text (google.cloud.language_v1.types.TextSpan):
+            The sentence text.
+        sentiment (google.cloud.language_v1.types.Sentiment):
+            For calls to [AnalyzeSentiment][] or if
+            [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]
+            is set to true, this field will contain the sentiment for
+            the sentence.
+    """
+
+    text: 'TextSpan' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='TextSpan',
+    )
+    sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='Sentiment',
+    )
+
+
+class Entity(proto.Message):
+    r"""Represents a phrase in the text that is a known entity, such
+    as a person, an organization, or location. The API associates
+    information, such as salience and mentions, with entities.
+
+    Attributes:
+        name (str):
+            The representative name for the entity.
+        type_ (google.cloud.language_v1.types.Entity.Type):
+            The entity type.
+        metadata (MutableMapping[str, str]):
+            Metadata associated with the entity.
+
+            For most entity types, the metadata is a Wikipedia URL
+            (``wikipedia_url``) and Knowledge Graph MID (``mid``), if
+            they are available. For the metadata associated with other
+            entity types, see the Type table below.
+        salience (float):
+            The salience score associated with the entity in the [0,
+            1.0] range.
+
+            The salience score for an entity provides information about
+            the importance or centrality of that entity to the entire
+            document text. Scores closer to 0 are less salient, while
+            scores closer to 1.0 are highly salient.
+        mentions (MutableSequence[google.cloud.language_v1.types.EntityMention]):
+            The mentions of this entity in the input
+            document. The API currently supports proper noun
+            mentions.
+        sentiment (google.cloud.language_v1.types.Sentiment):
+            For calls to [AnalyzeEntitySentiment][] or if
+            [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment]
+            is set to true, this field will contain the aggregate
+            sentiment expressed for this entity in the provided
+            document.
+    """
+    class Type(proto.Enum):
+        r"""The type of the entity. For most entity types, the associated
+        metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph
+        MID (``mid``). The table below lists the associated fields for
+        entities that have different metadata.
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            PERSON (1):
+                Person
+            LOCATION (2):
+                Location
+            ORGANIZATION (3):
+                Organization
+            EVENT (4):
+                Event
+            WORK_OF_ART (5):
+                Artwork
+            CONSUMER_GOOD (6):
+                Consumer product
+            OTHER (7):
+                Other types of entities
+            PHONE_NUMBER (9):
+                Phone number
+
+                The metadata lists the phone number, formatted according to
+                local convention, plus whichever additional elements appear
+                in the text:
+
+                - ``number`` - the actual number, broken down into sections
+                  as per local convention
+                - ``national_prefix`` - country code, if detected
+                - ``area_code`` - region or area code, if detected
+                - ``extension`` - phone extension (to be dialed after
+                  connection), if detected
+            ADDRESS (10):
+                Address
+
+                The metadata identifies the street number and locality plus
+                whichever additional elements appear in the text:
+
+                - ``street_number`` - street number
+                - ``locality`` - city or town
+                - ``street_name`` - street/route name, if detected
+                - ``postal_code`` - postal code, if detected
+                - ``country`` - country, if detected
+                - ``broad_region`` - administrative area, such as the
+                  state, if detected
+                - ``narrow_region`` - smaller administrative area, such as
+                  county, if detected
+                - ``sublocality`` - used in Asian addresses to demark a
+                  district within a city, if detected
+            DATE (11):
+                Date
+
+                The metadata identifies the components of the date:
+
+                - ``year`` - four digit year, if detected
+                - ``month`` - two digit month number, if detected
+                - ``day`` - two digit day number, if detected
+            NUMBER (12):
+                Number
+
+                The metadata is the number itself.
+            PRICE (13):
+                Price
+
+                The metadata identifies the ``value`` and ``currency``.
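+
+        An illustrative sketch (names from this module) of reading the typed
+        metadata from an analysis response::
+
+            for entity in response.entities:
+                if entity.type_ == Entity.Type.ADDRESS:
+                    print(entity.metadata.get('postal_code'))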
+ """ + UNKNOWN = 0 + PERSON = 1 + LOCATION = 2 + ORGANIZATION = 3 + EVENT = 4 + WORK_OF_ART = 5 + CONSUMER_GOOD = 6 + OTHER = 7 + PHONE_NUMBER = 9 + ADDRESS = 10 + DATE = 11 + NUMBER = 12 + PRICE = 13 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + salience: float = proto.Field( + proto.FLOAT, + number=4, + ) + mentions: MutableSequence['EntityMention'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='EntityMention', + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=6, + message='Sentiment', + ) + + +class Token(proto.Message): + r"""Represents the smallest syntactic building block of the text. + + Attributes: + text (google.cloud.language_v1.types.TextSpan): + The token text. + part_of_speech (google.cloud.language_v1.types.PartOfSpeech): + Parts of speech tag for this token. + dependency_edge (google.cloud.language_v1.types.DependencyEdge): + Dependency tree parse for this token. + lemma (str): + `Lemma `__ + of the token. + """ + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + part_of_speech: 'PartOfSpeech' = proto.Field( + proto.MESSAGE, + number=2, + message='PartOfSpeech', + ) + dependency_edge: 'DependencyEdge' = proto.Field( + proto.MESSAGE, + number=3, + message='DependencyEdge', + ) + lemma: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Sentiment(proto.Message): + r"""Represents the feeling associated with the entire text or + entities in the text. + + Attributes: + magnitude (float): + A non-negative number in the [0, +inf) range, which + represents the absolute magnitude of sentiment regardless of + score (positive or negative). + score (float): + Sentiment score between -1.0 (negative + sentiment) and 1.0 (positive sentiment). + """ + + magnitude: float = proto.Field( + proto.FLOAT, + number=2, + ) + score: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class PartOfSpeech(proto.Message): + r"""Represents part of speech information for a token. Parts of speech + are as defined in + http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf + + Attributes: + tag (google.cloud.language_v1.types.PartOfSpeech.Tag): + The part of speech tag. + aspect (google.cloud.language_v1.types.PartOfSpeech.Aspect): + The grammatical aspect. + case (google.cloud.language_v1.types.PartOfSpeech.Case): + The grammatical case. + form (google.cloud.language_v1.types.PartOfSpeech.Form): + The grammatical form. + gender (google.cloud.language_v1.types.PartOfSpeech.Gender): + The grammatical gender. + mood (google.cloud.language_v1.types.PartOfSpeech.Mood): + The grammatical mood. + number (google.cloud.language_v1.types.PartOfSpeech.Number): + The grammatical number. + person (google.cloud.language_v1.types.PartOfSpeech.Person): + The grammatical person. + proper (google.cloud.language_v1.types.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (google.cloud.language_v1.types.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (google.cloud.language_v1.types.PartOfSpeech.Tense): + The grammatical tense. + voice (google.cloud.language_v1.types.PartOfSpeech.Voice): + The grammatical voice. + """ + class Tag(proto.Enum): + r"""The part of speech tags enum. 
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            ADJ (1):
+                Adjective
+            ADP (2):
+                Adposition (preposition and postposition)
+            ADV (3):
+                Adverb
+            CONJ (4):
+                Conjunction
+            DET (5):
+                Determiner
+            NOUN (6):
+                Noun (common and proper)
+            NUM (7):
+                Cardinal number
+            PRON (8):
+                Pronoun
+            PRT (9):
+                Particle or other function word
+            PUNCT (10):
+                Punctuation
+            VERB (11):
+                Verb (all tenses and modes)
+            X (12):
+                Other: foreign words, typos, abbreviations
+            AFFIX (13):
+                Affix
+        """
+        UNKNOWN = 0
+        ADJ = 1
+        ADP = 2
+        ADV = 3
+        CONJ = 4
+        DET = 5
+        NOUN = 6
+        NUM = 7
+        PRON = 8
+        PRT = 9
+        PUNCT = 10
+        VERB = 11
+        X = 12
+        AFFIX = 13
+
+    class Aspect(proto.Enum):
+        r"""The characteristic of a verb that expresses time flow during
+        an event.
+
+        Values:
+            ASPECT_UNKNOWN (0):
+                Aspect is not applicable in the analyzed
+                language or is not predicted.
+            PERFECTIVE (1):
+                Perfective
+            IMPERFECTIVE (2):
+                Imperfective
+            PROGRESSIVE (3):
+                Progressive
+        """
+        ASPECT_UNKNOWN = 0
+        PERFECTIVE = 1
+        IMPERFECTIVE = 2
+        PROGRESSIVE = 3
+
+    class Case(proto.Enum):
+        r"""The grammatical function performed by a noun or pronoun in a
+        phrase, clause, or sentence. In some languages, other parts of
+        speech, such as adjective and determiner, take case inflection
+        in agreement with the noun.
+
+        Values:
+            CASE_UNKNOWN (0):
+                Case is not applicable in the analyzed
+                language or is not predicted.
+            ACCUSATIVE (1):
+                Accusative
+            ADVERBIAL (2):
+                Adverbial
+            COMPLEMENTIVE (3):
+                Complementive
+            DATIVE (4):
+                Dative
+            GENITIVE (5):
+                Genitive
+            INSTRUMENTAL (6):
+                Instrumental
+            LOCATIVE (7):
+                Locative
+            NOMINATIVE (8):
+                Nominative
+            OBLIQUE (9):
+                Oblique
+            PARTITIVE (10):
+                Partitive
+            PREPOSITIONAL (11):
+                Prepositional
+            REFLEXIVE_CASE (12):
+                Reflexive
+            RELATIVE_CASE (13):
+                Relative
+            VOCATIVE (14):
+                Vocative
+        """
+        CASE_UNKNOWN = 0
+        ACCUSATIVE = 1
+        ADVERBIAL = 2
+        COMPLEMENTIVE = 3
+        DATIVE = 4
+        GENITIVE = 5
+        INSTRUMENTAL = 6
+        LOCATIVE = 7
+        NOMINATIVE = 8
+        OBLIQUE = 9
+        PARTITIVE = 10
+        PREPOSITIONAL = 11
+        REFLEXIVE_CASE = 12
+        RELATIVE_CASE = 13
+        VOCATIVE = 14
+
+    class Form(proto.Enum):
+        r"""Depending on the language, Form categorizes different forms of
+        verbs, adjectives, adverbs, and so on: for example, inflected
+        endings of verbs and adjectives, or the distinction between short
+        and long forms of adjectives and participles.
+
+        Values:
+            FORM_UNKNOWN (0):
+                Form is not applicable in the analyzed
+                language or is not predicted.
+            ADNOMIAL (1):
+                Adnomial
+            AUXILIARY (2):
+                Auxiliary
+            COMPLEMENTIZER (3):
+                Complementizer
+            FINAL_ENDING (4):
+                Final ending
+            GERUND (5):
+                Gerund
+            REALIS (6):
+                Realis
+            IRREALIS (7):
+                Irrealis
+            SHORT (8):
+                Short form
+            LONG (9):
+                Long form
+            ORDER (10):
+                Order form
+            SPECIFIC (11):
+                Specific form
+        """
+        FORM_UNKNOWN = 0
+        ADNOMIAL = 1
+        AUXILIARY = 2
+        COMPLEMENTIZER = 3
+        FINAL_ENDING = 4
+        GERUND = 5
+        REALIS = 6
+        IRREALIS = 7
+        SHORT = 8
+        LONG = 9
+        ORDER = 10
+        SPECIFIC = 11
+
+    class Gender(proto.Enum):
+        r"""Gender classes of nouns reflected in the behaviour of
+        associated words.
+
+        Values:
+            GENDER_UNKNOWN (0):
+                Gender is not applicable in the analyzed
+                language or is not predicted.
+            FEMININE (1):
+                Feminine
+            MASCULINE (2):
+                Masculine
+            NEUTER (3):
+                Neuter
+        """
+        GENDER_UNKNOWN = 0
+        FEMININE = 1
+        MASCULINE = 2
+        NEUTER = 3
+
+    class Mood(proto.Enum):
+        r"""The grammatical feature of verbs, used for showing modality
+        and attitude.
+ + Values: + MOOD_UNKNOWN (0): + Mood is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_MOOD (1): + Conditional + IMPERATIVE (2): + Imperative + INDICATIVE (3): + Indicative + INTERROGATIVE (4): + Interrogative + JUSSIVE (5): + Jussive + SUBJUNCTIVE (6): + Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(proto.Enum): + r"""Count distinctions. + + Values: + NUMBER_UNKNOWN (0): + Number is not applicable in the analyzed + language or is not predicted. + SINGULAR (1): + Singular + PLURAL (2): + Plural + DUAL (3): + Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(proto.Enum): + r"""The distinction between the speaker, second person, third + person, etc. + + Values: + PERSON_UNKNOWN (0): + Person is not applicable in the analyzed + language or is not predicted. + FIRST (1): + First + SECOND (2): + Second + THIRD (3): + Third + REFLEXIVE_PERSON (4): + Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(proto.Enum): + r"""This category shows if the token is part of a proper name. + + Values: + PROPER_UNKNOWN (0): + Proper is not applicable in the analyzed + language or is not predicted. + PROPER (1): + Proper + NOT_PROPER (2): + Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(proto.Enum): + r"""Reciprocal features of a pronoun. + + Values: + RECIPROCITY_UNKNOWN (0): + Reciprocity is not applicable in the analyzed + language or is not predicted. + RECIPROCAL (1): + Reciprocal + NON_RECIPROCAL (2): + Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(proto.Enum): + r"""Time reference. + + Values: + TENSE_UNKNOWN (0): + Tense is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_TENSE (1): + Conditional + FUTURE (2): + Future + PAST (3): + Past + PRESENT (4): + Present + IMPERFECT (5): + Imperfect + PLUPERFECT (6): + Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(proto.Enum): + r"""The relationship between the action that a verb expresses and + the participants identified by its arguments. + + Values: + VOICE_UNKNOWN (0): + Voice is not applicable in the analyzed + language or is not predicted. 
+            ACTIVE (1):
+                Active
+            CAUSATIVE (2):
+                Causative
+            PASSIVE (3):
+                Passive
+        """
+        VOICE_UNKNOWN = 0
+        ACTIVE = 1
+        CAUSATIVE = 2
+        PASSIVE = 3
+
+    tag: Tag = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Tag,
+    )
+    aspect: Aspect = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Aspect,
+    )
+    case: Case = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=Case,
+    )
+    form: Form = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum=Form,
+    )
+    gender: Gender = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=Gender,
+    )
+    mood: Mood = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum=Mood,
+    )
+    number: Number = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=Number,
+    )
+    person: Person = proto.Field(
+        proto.ENUM,
+        number=8,
+        enum=Person,
+    )
+    proper: Proper = proto.Field(
+        proto.ENUM,
+        number=9,
+        enum=Proper,
+    )
+    reciprocity: Reciprocity = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=Reciprocity,
+    )
+    tense: Tense = proto.Field(
+        proto.ENUM,
+        number=11,
+        enum=Tense,
+    )
+    voice: Voice = proto.Field(
+        proto.ENUM,
+        number=12,
+        enum=Voice,
+    )
+
+
+class DependencyEdge(proto.Message):
+    r"""Represents dependency parse tree information for a token.
+    For more information on dependency labels, see
+    http://www.aclweb.org/anthology/P13-2017.
+
+    Attributes:
+        head_token_index (int):
+            Represents the head of this token in the dependency tree.
+            This is the index of the token which has an arc going to
+            this token. The index is the position of the token in the
+            array of tokens returned by the API method. If this token is
+            a root token, then the ``head_token_index`` is its own
+            index.
+        label (google.cloud.language_v1.types.DependencyEdge.Label):
+            The parse label for the token.
+    """
+    class Label(proto.Enum):
+        r"""The parse label enum for the token.
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            ABBREV (1):
+                Abbreviation modifier
+            ACOMP (2):
+                Adjectival complement
+            ADVCL (3):
+                Adverbial clause modifier
+            ADVMOD (4):
+                Adverbial modifier
+            AMOD (5):
+                Adjectival modifier of an NP
+            APPOS (6):
+                Appositional modifier of an NP
+            ATTR (7):
+                Attribute dependent of a copular verb
+            AUX (8):
+                Auxiliary (non-main) verb
+            AUXPASS (9):
+                Passive auxiliary
+            CC (10):
+                Coordinating conjunction
+            CCOMP (11):
+                Clausal complement of a verb or adjective
+            CONJ (12):
+                Conjunct
+            CSUBJ (13):
+                Clausal subject
+            CSUBJPASS (14):
+                Clausal passive subject
+            DEP (15):
+                Dependency (unable to determine)
+            DET (16):
+                Determiner
+            DISCOURSE (17):
+                Discourse
+            DOBJ (18):
+                Direct object
+            EXPL (19):
+                Expletive
+            GOESWITH (20):
+                Goes with (part of a word in a text not well
+                edited)
+            IOBJ (21):
+                Indirect object
+            MARK (22):
+                Marker (word introducing a subordinate
+                clause)
+            MWE (23):
+                Multi-word expression
+            MWV (24):
+                Multi-word verbal expression
+            NEG (25):
+                Negation modifier
+            NN (26):
+                Noun compound modifier
+            NPADVMOD (27):
+                Noun phrase used as an adverbial modifier
+            NSUBJ (28):
+                Nominal subject
+            NSUBJPASS (29):
+                Passive nominal subject
+            NUM (30):
+                Numeric modifier of a noun
+            NUMBER (31):
+                Element of compound number
+            P (32):
+                Punctuation mark
+            PARATAXIS (33):
+                Parataxis relation
+            PARTMOD (34):
+                Participial modifier
+            PCOMP (35):
+                The complement of a preposition is a clause
+            POBJ (36):
+                Object of a preposition
+            POSS (37):
+                Possession modifier
+            POSTNEG (38):
+                Postverbal negative particle
+            PRECOMP (39):
+                Predicate complement
+            PRECONJ (40):
+                Preconjunct
+            PREDET (41):
+                Predeterminer
+            PREF (42):
+                Prefix
+            PREP (43):
+                Prepositional modifier
+            PRONL (44):
+                The relationship
between a verb and verbal + morpheme + PRT (45): + Particle + PS (46): + Associative or possessive marker + QUANTMOD (47): + Quantifier phrase modifier + RCMOD (48): + Relative clause modifier + RCMODREL (49): + Complementizer in relative clause + RDROP (50): + Ellipsis without a preceding predicate + REF (51): + Referent + REMNANT (52): + Remnant + REPARANDUM (53): + Reparandum + ROOT (54): + Root + SNUM (55): + Suffix specifying a unit of number + SUFF (56): + Suffix + TMOD (57): + Temporal modifier + TOPIC (58): + Topic marker + VMOD (59): + Clause headed by an infinite form of the verb + that modifies a noun + VOCATIVE (60): + Vocative + XCOMP (61): + Open clausal complement + SUFFIX (62): + Name suffix + TITLE (63): + Name title + ADVPHMOD (64): + Adverbial phrase modifier + AUXCAUS (65): + Causative auxiliary + AUXVV (66): + Helper auxiliary + DTMOD (67): + Rentaishi (Prenominal modifier) + FOREIGN (68): + Foreign words + KW (69): + Keyword + LIST (70): + List for chains of comparable items + NOMC (71): + Nominalized clause + NOMCSUBJ (72): + Nominalized clausal subject + NOMCSUBJPASS (73): + Nominalized clausal passive + NUMC (74): + Compound of numeric modifier + COP (75): + Copula + DISLOCATED (76): + Dislocated relation (for fronted/topicalized + elements) + ASP (77): + Aspect marker + GMOD (78): + Genitive modifier + GOBJ (79): + Genitive object + INFMOD (80): + Infinitival modifier + MES (81): + Measure + NCOMP (82): + Nominal complement of a noun + """ + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index: int = proto.Field( + proto.INT32, + number=1, + ) + label: Label = proto.Field( + proto.ENUM, + number=2, + enum=Label, + ) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. + + Attributes: + text (google.cloud.language_v1.types.TextSpan): + The mention text. + type_ (google.cloud.language_v1.types.EntityMention.Type): + The type of the entity mention. + sentiment (google.cloud.language_v1.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. + """ + class Type(proto.Enum): + r"""The supported types of mentions. 
+
+        Values:
+            TYPE_UNKNOWN (0):
+                Unknown
+            PROPER (1):
+                Proper name
+            COMMON (2):
+                Common noun (or noun compound)
+        """
+        TYPE_UNKNOWN = 0
+        PROPER = 1
+        COMMON = 2
+
+    text: 'TextSpan' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='TextSpan',
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Type,
+    )
+    sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='Sentiment',
+    )
+
+
+class TextSpan(proto.Message):
+    r"""Represents an output piece of text.
+
+    Attributes:
+        content (str):
+            The content of the output text.
+        begin_offset (int):
+            The API calculates the beginning offset of the content in
+            the original document according to the
+            [EncodingType][google.cloud.language.v1.EncodingType]
+            specified in the API request.
+    """
+
+    content: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    begin_offset: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+
+
+class ClassificationCategory(proto.Message):
+    r"""Represents a category returned from the text classifier.
+
+    Attributes:
+        name (str):
+            The name of the category representing the document, from the
+            `predefined
+            taxonomy <https://cloud.google.com/natural-language/docs/categories>`__.
+        confidence (float):
+            The classifier's confidence of the category.
+            Number represents how certain the classifier is
+            that this category represents the given text.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    confidence: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class ClassificationModelOptions(proto.Message):
+    r"""Model options available for classification requests.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        v1_model (google.cloud.language_v1.types.ClassificationModelOptions.V1Model):
+            Setting this field will use the V1 model and
+            V1 content categories version. The V1 model is a
+            legacy model; support for this will be
+            discontinued in the future.
+
+            This field is a member of `oneof`_ ``model_type``.
+        v2_model (google.cloud.language_v1.types.ClassificationModelOptions.V2Model):
+            Setting this field will use the V2 model with
+            the appropriate content categories version. The
+            V2 model is a better performing model.
+
+            This field is a member of `oneof`_ ``model_type``.
+    """
+
+    class V1Model(proto.Message):
+        r"""Options for the V1 model.
+        """
+
+    class V2Model(proto.Message):
+        r"""Options for the V2 model.
+
+        Attributes:
+            content_categories_version (google.cloud.language_v1.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion):
+                The content categories used for
+                classification.
+        """
+        class ContentCategoriesVersion(proto.Enum):
+            r"""The content categories used for classification.
+
+            Values:
+                CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0):
+                    If ``ContentCategoriesVersion`` is not specified, this
+                    option will default to ``V1``.
+                V1 (1):
+                    Legacy content categories of our initial
+                    launch in 2017.
+                V2 (2):
+                    Updated content categories in 2022.
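+
+            An illustrative sketch (types from this module) of requesting the
+            V2 categories::
+
+                options = ClassificationModelOptions(
+                    v2_model=ClassificationModelOptions.V2Model(
+                        content_categories_version=ClassificationModelOptions.V2Model.ContentCategoriesVersion.V2,
+                    ),
+                )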
+ """ + CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 + V1 = 1 + V2 = 2 + + content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( + proto.ENUM, + number=1, + enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', + ) + + v1_model: V1Model = proto.Field( + proto.MESSAGE, + number=1, + oneof='model_type', + message=V1Model, + ) + v2_model: V2Model = proto.Field( + proto.MESSAGE, + number=2, + oneof='model_type', + message=V2Model, + ) + + +class AnalyzeSentimentRequest(proto.Message): + r"""The sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSentimentResponse(proto.Message): + r"""The sentiment analysis response message. + + Attributes: + document_sentiment (google.cloud.language_v1.types.Sentiment): + The overall sentiment of the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + The sentiment for all the sentences in the + document. + """ + + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=1, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Sentence', + ) + + +class AnalyzeEntitySentimentRequest(proto.Message): + r"""The entity-level sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. 
+ """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + Sentences in the input document. + tokens (MutableSequence[google.cloud.language_v1.types.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + language: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ClassifyTextRequest(proto.Message): + r"""The document classification request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): + Model options to use for classification. + Defaults to v1 options if not specified. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='ClassificationModelOptions', + ) + + +class ClassifyTextResponse(proto.Message): + r"""The document classification response message. + + Attributes: + categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Categories representing the input document. + """ + + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ClassificationCategory', + ) + + +class AnnotateTextRequest(proto.Message): + r"""The request message for the text annotation API, which can + perform multiple analysis types (sentiment, entities, and + syntax) in one call. 
+ + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + features (google.cloud.language_v1.types.AnnotateTextRequest.Features): + Required. The enabled features. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + class Features(proto.Message): + r"""All available features for sentiment, syntax, and semantic + analysis. Setting each one to true will enable that specific + analysis for the input. + + Attributes: + extract_syntax (bool): + Extract syntax information. + extract_entities (bool): + Extract entities. + extract_document_sentiment (bool): + Extract document-level sentiment. + extract_entity_sentiment (bool): + Extract entities and their associated + sentiment. + classify_text (bool): + Classify the full document into categories. + classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): + The model options to use for classification. Defaults to v1 + options if not specified. Only used if ``classify_text`` is + set to true. + """ + + extract_syntax: bool = proto.Field( + proto.BOOL, + number=1, + ) + extract_entities: bool = proto.Field( + proto.BOOL, + number=2, + ) + extract_document_sentiment: bool = proto.Field( + proto.BOOL, + number=3, + ) + extract_entity_sentiment: bool = proto.Field( + proto.BOOL, + number=4, + ) + classify_text: bool = proto.Field( + proto.BOOL, + number=6, + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=10, + message='ClassificationModelOptions', + ) + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + features: Features = proto.Field( + proto.MESSAGE, + number=2, + message=Features, + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=3, + enum='EncodingType', + ) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + tokens (MutableSequence[google.cloud.language_v1.types.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (google.cloud.language_v1.types.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Categories identified in the input document. 
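+
+    An illustrative sketch of reading a response (``response`` assumed to be
+    an ``AnnotateTextResponse``)::
+
+        for entity in response.entities:
+            print(entity.name, entity.salience)
+        print(response.document_sentiment.score)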
+    """
+
+    sentences: MutableSequence['Sentence'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='Sentence',
+    )
+    tokens: MutableSequence['Token'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='Token',
+    )
+    entities: MutableSequence['Entity'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='Entity',
+    )
+    document_sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message='Sentiment',
+    )
+    language: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=6,
+        message='ClassificationCategory',
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini
new file mode 100644
index 00000000..574c5aed
--- /dev/null
+++ b/owl-bot-staging/v1/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py
new file mode 100644
index 00000000..b104aa2e
--- /dev/null
+++ b/owl-bot-staging/v1/noxfile.py
@@ -0,0 +1,184 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+# Strip the trailing newline from the ``setup.py --name`` output so the value
+# can be passed directly to command-line flags.
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8").strip()
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.11"
+
+nox.options.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "lint_setup_py",
+]
+
+@nox.session(python=ALL_PYTHON)
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/language_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py new file mode 100644 index 00000000..71f2d049 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
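+#
+# Note: sample_analyze_entities() below is a coroutine and may require an
+# event loop to run, e.g. asyncio.run(sample_analyze_entities()).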
+# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py new file mode 100644 index 00000000..14beb557 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
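+# - It may require replacing the placeholder document content
+# "content_value" with real text to analyze.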
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py new file mode 100644 index 00000000..a8a1b59b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
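+# - It may require an event loop to drive the coroutine, e.g.
+# asyncio.run(sample_analyze_entity_sentiment()).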
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py new file mode 100644 index 00000000..c6d27ac8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
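+# - It may require valid credentials, such as Application Default
+# Credentials, before the client can authenticate.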
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py new file mode 100644 index 00000000..6b65f274 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
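+# - It may require an event loop to drive the coroutine, e.g.
+# asyncio.run(sample_analyze_sentiment()).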
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py new file mode 100644 index 00000000..c9a48df7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
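+# - It may require replacing "content_value" with text that actually
+# carries sentiment.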
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py new file mode 100644 index 00000000..31640e52 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSyntax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
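+# - It may require an event loop to drive the coroutine, e.g.
+# asyncio.run(sample_analyze_syntax()).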
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py new file mode 100644 index 00000000..947613db --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSyntax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
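+# - It may require setting document.language when automatic language
+# detection is not suitable for the input.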
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py new file mode 100644 index 00000000..02a54aee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnnotateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
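+# - It may require an event loop to drive the coroutine, e.g.
+# asyncio.run(sample_annotate_text()).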
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py new file mode 100644 index 00000000..9d90a0f0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnnotateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
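+# - It may require setting request.features, which is a required field;
+# for example, request.features.extract_entities = True. The request
+# below initializes only the document.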
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py new file mode 100644 index 00000000..a6497c09 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ClassifyText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
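+# - It may require an event loop to drive the coroutine, e.g.
+# asyncio.run(sample_classify_text()).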
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py new file mode 100644 index 00000000..e1d32646 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ClassifyText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
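+# - It may require a document with enough text; classification typically
+# needs more than a short placeholder to return categories.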
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json new file mode 100644 index 00000000..936a8b70 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json @@ -0,0 +1,1029 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.language.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-language", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1_generated_language_service_analyze_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" 
+ }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1_generated_language_service_analyze_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1_generated_language_service_analyze_entity_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entity_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": 
"google.cloud.language_v1.LanguageServiceClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1_generated_language_service_analyze_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"language_v1_generated_language_service_analyze_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1_generated_language_service_analyze_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1_generated_language_service_analyze_syntax_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_syntax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1_generated_language_service_analyze_syntax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_syntax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1_generated_language_service_annotate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnnotateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_annotate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1_generated_language_service_annotate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnnotateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_annotate_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1_generated_language_service_classify_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ClassifyText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": 
"FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_classify_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1_generated_language_service_classify_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ClassifyText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_classify_text_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py new file mode 100644 index 00000000..fc15df57 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', 'classification_model_options', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library. 
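+
+For illustration (hypothetical names, not taken from any real sample), a flattened call such as
+    client.analyze_sentiment(document, encoding_type, timeout=5.0)
+is un-flattened into
+    client.analyze_sentiment(request={'document': document, 'encoding_type': encoding_type}, timeout=5.0)
+Control parameters (retry, timeout, metadata) are kept as keyword arguments rather than folded into the request dict.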
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 00000000..047e5bce --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
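+#
+# Note on versioning: setup.py below does not hard-code a version; it
+# exec()s google/cloud/language/gapic_version.py and reads __version__,
+# presumably so a release only needs to bump that one file.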
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-language' + + +description = "Google Cloud Language API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-language" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py new file mode 100644 index 00000000..80a2a6a3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py @@ -0,0 +1,3674 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.language_v1.services.language_service import LanguageServiceAsyncClient +from google.cloud.language_v1.services.language_service import LanguageServiceClient +from google.cloud.language_v1.services.language_service import transports +from google.cloud.language_v1.types import language_service +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LanguageServiceClient._get_default_mtls_endpoint(None) is None + assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + 
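+# (Context for the test below: always_use_jwt_access opts service-account
+# credentials into self-signed JWTs instead of OAuth2 access tokens, hence
+# the spy on with_always_use_jwt_access.)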
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LanguageServiceGrpcTransport, "grpc"), + (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + +def test_language_service_client_get_transport_class(): + transport = LanguageServiceClient.get_transport_class() + available_transports = [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, + ] + assert transport in available_transports + + transport = LanguageServiceClient.get_transport_class("grpc") + assert transport == transports.LanguageServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LanguageServiceClient, LanguageServiceAsyncClient +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), +]) +def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_language_service_client_client_options_from_dict(): + with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSentimentRequest, + dict, +]) +def test_analyze_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language='language_value', + ) + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
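+ # (grpc_helpers_async.FakeUnaryUnaryCall wraps the response so that awaiting
+ # the mocked stub call yields it, mimicking a real async unary-unary RPC.)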
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse( + language='language_value', + )) + response = await client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_sentiment_async_from_dict(): + await test_analyze_sentiment_async(request_type=dict) + + +def test_analyze_sentiment_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_sentiment_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse( + language='language_value', + )) + response = await client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_entities_async_from_dict(): + await test_analyze_entities_async(request_type=dict) + + +def test_analyze_entities_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_entities_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + response = client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + client.analyze_entity_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse( + language='language_value', + )) + response = await client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async_from_dict(): + await test_analyze_entity_sentiment_async(request_type=dict) + + +def test_analyze_entity_sentiment_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entity_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_entity_sentiment_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entity_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + response = client.analyze_syntax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSyntaxRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + client.analyze_syntax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSyntaxRequest() + +@pytest.mark.asyncio +async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse( + language='language_value', + )) + response = await client.analyze_syntax(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+ assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async_from_dict():
+ await test_analyze_syntax_async(request_type=dict)
+
+
+def test_analyze_syntax_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnalyzeSyntaxResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.analyze_syntax(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+
+def test_analyze_syntax_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.analyze_syntax(
+ language_service.AnalyzeSyntaxRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.analyze_syntax(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.analyze_syntax(
+ language_service.AnalyzeSyntaxRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.ClassifyTextRequest,
+ dict,
+])
+def test_classify_text(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.ClassifyTextResponse()
+ response = client.classify_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+def test_classify_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ client.classify_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+@pytest.mark.asyncio
+async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+ response = await client.classify_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_classify_text_async_from_dict():
+ await test_classify_text_async(request_type=dict)
+
+
+def test_classify_text_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.ClassifyTextResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.classify_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+
+def test_classify_text_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.classify_text(
+ language_service.ClassifyTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.classify_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.classify_text(
+ language_service.ClassifyTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.AnnotateTextRequest,
+ dict,
+])
+def test_annotate_text(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnnotateTextResponse(
+ language='language_value',
+ )
+ response = client.annotate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnnotateTextResponse)
+ assert response.language == 'language_value'
+
+
+def test_annotate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ client.annotate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+@pytest.mark.asyncio
+async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
+ language='language_value',
+ ))
+ response = await client.annotate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnnotateTextResponse)
+ assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_async_from_dict():
+ await test_annotate_text_async(request_type=dict)
+
+
+def test_annotate_text_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnnotateTextResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.annotate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
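+ # (The flattened keyword arguments above are folded into a single
+ # AnnotateTextRequest; the assertions below read each field back off
+ # that request object.)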
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].features
+ mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+
+def test_annotate_text_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.annotate_text(
+ language_service.AnnotateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.annotate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].features
+ mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.annotate_text(
+ language_service.AnnotateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.AnalyzeSentimentRequest,
+ dict,
+])
+def test_analyze_sentiment_rest(request_type):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
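+ # (The REST tests below never touch the network: the mocked
+ # Session.request returns a requests.Response whose _content is the
+ # protobuf response serialized to JSON, which the transport then
+ # parses back into the expected response type.)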
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
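+ # (The dict below mirrors the shape that path_template.transcode()
+ # is expected to return for a POST binding: 'uri', 'method',
+ # 'query_params', and, when present, 'body'.)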
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_sentiment(request)
+
+ # The GAPIC REST layer always pins the wire format via the system
+ # parameter $alt=json;enum-encoding=int.
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_sentiment_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_sentiment_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
+
+ request = language_service.AnalyzeSentimentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeSentimentResponse()
+
+ client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
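+ # (google-api-core maps an HTTP 400 response to
+ # google.api_core.exceptions.BadRequest, which is the error asserted here.)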
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_sentiment(request) + + +def test_analyze_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeSentiment" % client.transport._host, args[1]) + + +def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entities(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_entities(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_entities_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_entities._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entities_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
+
+ request = language_service.AnalyzeEntitiesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeEntitiesResponse()
+
+ client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) + + +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeEntities" % client.transport._host, args[1]) + + +def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_entity_sentiment(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_entity_sentiment_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
+
+ request = language_service.AnalyzeEntitySentimentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeEntitySentimentResponse()
+
+ client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeEntitySentiment" % client.transport._host, args[1]) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_syntax(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_syntax_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_syntax._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_syntax_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
+
+ request = language_service.AnalyzeSyntaxRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeSyntaxResponse()
+
+ client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.analyze_syntax(request)
+
+
+def test_analyze_syntax_rest_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = language_service.AnalyzeSyntaxResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ client.analyze_syntax(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/documents:analyzeSyntax" % client.transport._host, args[1])
+
+
+def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.analyze_syntax(
+ language_service.AnalyzeSyntaxRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+def test_analyze_syntax_rest_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest'
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.ClassifyTextRequest,
+ dict,
+])
+def test_classify_text_rest(request_type):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = language_service.ClassifyTextResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ response = client.classify_text(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.classify_text(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_classify_text_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.classify_text._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_classify_text_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
+
+ request = language_service.ClassifyTextRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.ClassifyTextResponse()
+
+ client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:classifyText" % client.transport._host, args[1]) + + +def test_classify_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnnotateTextRequest, + dict, +]) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == 'language_value' + + +def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.annotate_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_annotate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.annotate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", "features", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
+
+        request = language_service.AnnotateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnnotateTextResponse()
+
+        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:annotateText" % client.transport._host, args[1]) + + +def test_annotate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'analyze_sentiment', + 'analyze_entities', + 'analyze_entity_sentiment', + 'analyze_syntax', + 'classify_text', + 'annotate_text', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_language_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport() + adc.assert_called_once() + + +def test_language_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LanguageServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_language_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, + ], +) +def test_language_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LanguageServiceGrpcTransport, grpc_helpers), + (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_language_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_language_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.LanguageServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_no_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_with_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_language_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = LanguageServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = LanguageServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.analyze_sentiment._session
+    session2 = client2.transport.analyze_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entities._session
+    session2 = client2.transport.analyze_entities._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entity_sentiment._session
+    session2 = client2.transport.analyze_entity_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_syntax._session
+    session2 = client2.transport.analyze_syntax._session
+    assert session1 != session2
+    session1 = client1.transport.classify_text._session
+    session2 = client2.transport.classify_text._session
+    assert session1 != session2
+    session1 = client1.transport.annotate_text._session
+    session2 = client2.transport.annotate_text._session
+    assert session1 != session2
+
+
+def test_language_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LanguageServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_language_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LanguageServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LanguageServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LanguageServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LanguageServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LanguageServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LanguageServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LanguageServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LanguageServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LanguageServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LanguageServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LanguageServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LanguageServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = LanguageServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta2/.coveragerc b/owl-bot-staging/v1beta2/.coveragerc new file mode 100644 index 00000000..c1f51536 --- /dev/null +++ b/owl-bot-staging/v1beta2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/language/__init__.py + google/cloud/language/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1beta2/.flake8 b/owl-bot-staging/v1beta2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v1beta2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1beta2/MANIFEST.in b/owl-bot-staging/v1beta2/MANIFEST.in new file mode 100644 index 00000000..dcc097e7 --- /dev/null +++ b/owl-bot-staging/v1beta2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/language *.py +recursive-include google/cloud/language_v1beta2 *.py diff --git a/owl-bot-staging/v1beta2/README.rst b/owl-bot-staging/v1beta2/README.rst new file mode 100644 index 00000000..0c5f1b6b --- /dev/null +++ b/owl-bot-staging/v1beta2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Language API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Language API. +4. 
 `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1beta2/docs/conf.py b/owl-bot-staging/v1beta2/docs/conf.py
new file mode 100644
index 00000000..2e1b322d
--- /dev/null
+++ b/owl-bot-staging/v1beta2/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-language documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "4.0.1"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-language"
+copyright = u"2022, Google, LLC"
+author = u"Google APIs"  # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for Python",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-language-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warnings, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+ # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-language.tex", + u"google-cloud-language Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-language", + u"Google Cloud Language Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-language", + u"google-cloud-language Documentation", + author, + "google-cloud-language", + "GAPIC library for Google Cloud Language API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta2/docs/index.rst b/owl-bot-staging/v1beta2/docs/index.rst new file mode 100644 index 00000000..42b8e680 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1beta2/services + language_v1beta2/types diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst new file mode 100644 index 00000000..799a7892 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst @@ -0,0 +1,6 @@ +LanguageService +--------------------------------- + +.. automodule:: google.cloud.language_v1beta2.services.language_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst new file mode 100644 index 00000000..40ead585 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Language v1beta2 API +============================================== +.. toctree:: + :maxdepth: 2 + + language_service diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst new file mode 100644 index 00000000..2e834e61 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Language v1beta2 API +=========================================== + +.. automodule:: google.cloud.language_v1beta2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py new file mode 100644 index 00000000..6bfa0911 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.language import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.language_v1beta2.services.language_service.client import LanguageServiceClient +from google.cloud.language_v1beta2.services.language_service.async_client import LanguageServiceAsyncClient + +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxResponse +from google.cloud.language_v1beta2.types.language_service import AnnotateTextRequest +from google.cloud.language_v1beta2.types.language_service import AnnotateTextResponse +from google.cloud.language_v1beta2.types.language_service import ClassificationCategory +from google.cloud.language_v1beta2.types.language_service import ClassificationModelOptions +from google.cloud.language_v1beta2.types.language_service import ClassifyTextRequest +from google.cloud.language_v1beta2.types.language_service import ClassifyTextResponse +from google.cloud.language_v1beta2.types.language_service import DependencyEdge +from google.cloud.language_v1beta2.types.language_service import Document +from google.cloud.language_v1beta2.types.language_service import Entity +from google.cloud.language_v1beta2.types.language_service import EntityMention +from google.cloud.language_v1beta2.types.language_service import ModerateTextRequest +from google.cloud.language_v1beta2.types.language_service import ModerateTextResponse +from google.cloud.language_v1beta2.types.language_service import PartOfSpeech +from google.cloud.language_v1beta2.types.language_service import Sentence +from google.cloud.language_v1beta2.types.language_service import Sentiment +from google.cloud.language_v1beta2.types.language_service import TextSpan +from google.cloud.language_v1beta2.types.language_service import Token +from google.cloud.language_v1beta2.types.language_service import EncodingType + +__all__ = ('LanguageServiceClient', + 'LanguageServiceAsyncClient', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ 
b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language/py.typed b/owl-bot-staging/v1beta2/google/cloud/language/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py new file mode 100644 index 00000000..e6a87024 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.language_v1beta2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.language_service import LanguageServiceClient +from .services.language_service import LanguageServiceAsyncClient + +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassificationModelOptions +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import ModerateTextRequest +from .types.language_service import ModerateTextResponse +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token +from .types.language_service import EncodingType + +__all__ = ( + 'LanguageServiceAsyncClient', +'AnalyzeEntitiesRequest', +'AnalyzeEntitiesResponse', +'AnalyzeEntitySentimentRequest', +'AnalyzeEntitySentimentResponse', +'AnalyzeSentimentRequest', +'AnalyzeSentimentResponse', +'AnalyzeSyntaxRequest', +'AnalyzeSyntaxResponse', +'AnnotateTextRequest', +'AnnotateTextResponse', +'ClassificationCategory', +'ClassificationModelOptions', +'ClassifyTextRequest', +'ClassifyTextResponse', +'DependencyEdge', +'Document', +'EncodingType', +'Entity', +'EntityMention', +'LanguageServiceClient', +'ModerateTextRequest', +'ModerateTextResponse', +'PartOfSpeech', +'Sentence', +'Sentiment', +'TextSpan', +'Token', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json new file mode 100644 index 00000000..85a901f9 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.language_v1beta2", + "protoPackage": "google.cloud.language.v1beta2", + "schema": "1.0", + "services": { + "LanguageService": { + "clients": { + "grpc": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "grpc-async": { + 
"libraryClient": "LanguageServiceAsyncClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py new file mode 100644 index 00000000..6e5f9052 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient + +__all__ = ( + 'LanguageServiceClient', + 'LanguageServiceAsyncClient', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py new file mode 100644 index 00000000..a3a46b78 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -0,0 +1,963 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.language_v1beta2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. 
+ """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) + common_project_path = staticmethod(LanguageServiceClient.common_project_path) + parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) + common_location_path = staticmethod(LanguageServiceClient.common_location_path) + parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]]): + The request object. The sentiment analysis request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
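The wrapped method above supplies the generated defaults (0.1s initial backoff, 1.3x multiplier, 60s backoff cap, 600s deadline and timeout); the per-call `retry`, `timeout`, and `metadata` parameters override them. A minimal sketch of such an override, assuming Application Default Credentials are available:

    from google.cloud import language_v1beta2

    async def sample_analyze_sentiment_with_overrides():
        client = language_v1beta2.LanguageServiceAsyncClient()
        document = language_v1beta2.Document(content="content_value")

        # Flattened fields instead of a request object, and a tighter
        # timeout than the generated 600.0s default.
        response = await client.analyze_sentiment(
            document=document,
            encoding_type=language_v1beta2.EncodingType.UTF8,
            timeout=30.0,
        )
        print(response.document_sentiment.score)
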
+ return response + + async def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]]): + The request object. The entity analysis request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entities, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]]): + The request object. The entity-level sentiment analysis + request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entity_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]]): + The request object. The syntax analysis request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_syntax, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]]): + The request object. The document classification request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.language_v1beta2.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.classify_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]]): + The request object. The document moderation request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. 
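The `has_flattened_params` check that follows this comment (and that every generated method repeats) rejects mixing a `request` object with flattened field arguments. A small sketch of the failure mode, assuming an already-constructed client:

    from google.cloud import language_v1beta2

    async def sample_invalid_mixed_arguments(client: language_v1beta2.LanguageServiceAsyncClient):
        request = language_v1beta2.ModerateTextRequest(
            document=language_v1beta2.Document(content="content_value"),
        )
        # Raises ValueError: if `request` is set, none of the individual
        # field arguments may also be set.
        await client.moderate_text(request=request, document=request.document)
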
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ModerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.moderate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`google.cloud.language_v1beta2.types.AnnotateTextRequest.Features`): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceAsyncClient", +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py new file mode 100644 index 00000000..9093d5b6 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py @@ -0,0 +1,1116 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+
+from google.cloud.language_v1beta2 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.language_v1beta2.types import language_service
+from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LanguageServiceGrpcTransport
+from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
+from .transports.rest import LanguageServiceRestTransport
+
+
+class LanguageServiceClientMeta(type):
+    """Metaclass for the LanguageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
+    _transport_registry["grpc"] = LanguageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = LanguageServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[LanguageServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "language.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LanguageServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
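The endpoint resolution implemented above can be exercised directly. A hedged sketch of the outcomes, under the assumption that no client certificate is configured in the environment:

    import os
    from google.cloud import language_v1beta2

    Client = language_v1beta2.LanguageServiceClient

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    endpoint, cert_source = Client.get_mtls_endpoint_and_cert_source()
    print(endpoint)      # language.googleapis.com

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert_source = Client.get_mtls_endpoint_and_cert_source()
    print(endpoint)      # language.mtls.googleapis.com

    # "auto" (the default) picks the mTLS endpoint only when a client
    # certificate source is available; cert_source stays None here.
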
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]): + The request object. 
The sentiment analysis request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]): + The request object. The entity analysis request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
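+        # Illustrative only (not part of the generated template): each entity
+        # in ``response.entities`` exposes ``name``, ``type_``, and
+        # ``salience``, so a caller might iterate the results like:
+        #
+        #     for entity in response.entities:
+        #         print(entity.name, entity.type_, entity.salience)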
+ return response + + def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]): + The request object. The entity-level sentiment analysis + request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
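+        # For example, a caller passing ``document=`` together with
+        # ``encoding_type=language_service.EncodingType.UTF8`` has both
+        # values copied onto the request below.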
+ if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]): + The request object. The syntax analysis request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]): + The request object. The document classification request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
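+        # A plain ``dict`` request is also accepted and coerced here, e.g.
+        # ``{"document": {"content": "text to classify"}}`` (illustrative
+        # value only).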
+ if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]): + The request object. The document moderation request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ModerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ModerateTextRequest): + request = language_service.ModerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
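+        # ``document`` is the only flattened field ModerateText accepts, so
+        # it is the only value copied onto the request below.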
+ if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.moderate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
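+        # ``features`` selects which analyses run; for instance, a features
+        # value with ``extract_entities`` and ``extract_document_sentiment``
+        # both set to ``True`` combines entity and sentiment analysis in a
+        # single call.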
+ has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LanguageServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceClient", +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py new file mode 100644 index 00000000..3cb6ab92 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestTransport +from .rest import LanguageServiceRestInterceptor + + +# Compile a registry of transports. 
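+# The registry keys double as the strings accepted by the client's
+# ``transport`` argument; e.g. ``LanguageServiceClient(transport="rest")``
+# selects ``LanguageServiceRestTransport``.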
+_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] +_transport_registry['grpc'] = LanguageServiceGrpcTransport +_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport +_transport_registry['rest'] = LanguageServiceRestTransport + +__all__ = ( + 'LanguageServiceTransport', + 'LanguageServiceGrpcTransport', + 'LanguageServiceGrpcAsyncIOTransport', + 'LanguageServiceRestTransport', + 'LanguageServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py new file mode 100644 index 00000000..99ee1db2 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.language_v1beta2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1beta2.types import language_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LanguageServiceTransport(abc.ABC): + """Abstract transport class for LanguageService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'language.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT
+                should be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id,
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply an audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try
+        # to use a self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
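+        # Each RPC below gets its default policy: exponential backoff
+        # starting at 0.1s and capped at 60s (multiplier 1.3), retrying only
+        # DeadlineExceeded and ServiceUnavailable, with a 600s deadline and
+        # default timeout. ModerateText alone is wrapped with no default
+        # retry and no default timeout.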
+        self._wrapped_methods = {
+            self.analyze_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entities: gapic_v1.method.wrap_method(
+                self.analyze_entities,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_entity_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_syntax: gapic_v1.method.wrap_method(
+                self.analyze_syntax,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.classify_text: gapic_v1.method.wrap_method(
+                self.classify_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.moderate_text: gapic_v1.method.wrap_method(
+                self.moderate_text,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.annotate_text: gapic_v1.method.wrap_method(
+                self.annotate_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Union[ + language_service.AnalyzeSentimentResponse, + Awaitable[language_service.AnalyzeSentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Union[ + language_service.AnalyzeEntitiesResponse, + Awaitable[language_service.AnalyzeEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Union[ + language_service.AnalyzeEntitySentimentResponse, + Awaitable[language_service.AnalyzeEntitySentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Union[ + language_service.AnalyzeSyntaxResponse, + Awaitable[language_service.AnalyzeSyntaxResponse] + ]]: + raise NotImplementedError() + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Union[ + language_service.ClassifyTextResponse, + Awaitable[language_service.ClassifyTextResponse] + ]]: + raise NotImplementedError() + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Union[ + language_service.ModerateTextResponse, + Awaitable[language_service.ModerateTextResponse] + ]]: + raise NotImplementedError() + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Union[ + language_service.AnnotateTextResponse, + Awaitable[language_service.AnnotateTextResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'LanguageServiceTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py new file mode 100644 index 00000000..48b7cd8b --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. 
+ + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + ~.AnalyzeSentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
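+        # The stub is created once and cached in ``self._stubs``; subsequent
+        # property accesses reuse the same callable.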
+ if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + ~.ModerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'LanguageServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..710e8bb5 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import LanguageServiceGrpcTransport + + +class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): + """gRPC AsyncIO backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
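+                # (Passing a credentials file here alongside the saved
+                # credentials would raise ``DuplicateCredentialArgs``; see
+                # the Raises section of the docstring above.)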
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Awaitable[language_service.AnalyzeSentimentResponse]]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + Awaitable[~.AnalyzeSentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Awaitable[language_service.AnalyzeEntitiesResponse]]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + Awaitable[~.AnalyzeEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Awaitable[language_service.AnalyzeEntitySentimentResponse]]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + Awaitable[~.AnalyzeEntitySentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse]]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse]]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Awaitable[language_service.ModerateTextResponse]]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + Awaitable[~.ModerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
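+        # The wrapped stub is created on first property access and cached in
+        # ``self._stubs``; later accesses reuse the same callable.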
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse]]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'LanguageServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py new file mode 100644 index 00000000..9696c821 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py @@ -0,0 +1,1029 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+import grpc  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from requests import __version__ as requests_version
+import dataclasses
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.cloud.language_v1beta2.types import language_service
+
+from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class LanguageServiceRestInterceptor:
+    """Interceptor for LanguageService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the LanguageServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor):
+            def pre_analyze_entities(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_entities(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_entity_sentiment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_entity_sentiment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_sentiment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_sentiment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_syntax(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_syntax(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_annotate_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_annotate_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_classify_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_classify_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_moderate_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_moderate_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor())
+        client = LanguageServiceClient(transport=transport)
+
+
+    """
+    def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for analyze_entities
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the LanguageService server.
+        """
+        return request, metadata
+
+    def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse:
+        """Post-rpc interceptor for analyze_entities
+
+        Override in a subclass to manipulate the response
+        after it is returned by the LanguageService server but before
+        it is returned to user code.
+        """
+        return response
+    def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for analyze_entity_sentiment
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the LanguageService server.
+        """
+        return request, metadata
+
+    def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse:
+        """Post-rpc interceptor for analyze_entity_sentiment
+
+        Override in a subclass to manipulate the response
+        after it is returned by the LanguageService server but before
+        it is returned to user code.
+ """ + return response + def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for moderate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. 
+ """ + return request, metadata + + def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: + """Post-rpc interceptor for moderate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LanguageServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AnalyzeEntities(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntities") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeEntitiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Call the analyze entities method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitiesRequest): + The request object. The entity analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeEntities', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_entities(request, metadata) + pb_request = language_service.AnalyzeEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
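+            # (``from_http_response`` converts the HTTP status and payload
+            # into the matching typed exception, e.g. ``NotFound`` for 404.)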
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeEntitySentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeEntitySentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeSentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSyntaxRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeSyntax', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnnotateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:annotateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ClassifyTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:classifyText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + class _ModerateText(LanguageServiceRestStub): + def __hash__(self): + return hash("ModerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ModerateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ModerateTextResponse: + r"""Call the moderate text method over HTTP. + + Args: + request (~.language_service.ModerateTextRequest): + The request object. The document moderation request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ModerateTextResponse: + The document moderation response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:moderateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_moderate_text(request, metadata) + pb_request = language_service.ModerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ModerateTextResponse() + pb_resp = language_service.ModerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_moderate_text(resp) + return resp + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LanguageServiceRestTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py new file mode 100644 index 00000000..8dadfa8a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .language_service import ( + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + AnnotateTextRequest, + AnnotateTextResponse, + ClassificationCategory, + ClassificationModelOptions, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + ModerateTextRequest, + ModerateTextResponse, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, + EncodingType, +) + +__all__ = ( + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py new file mode 100644 index 00000000..3b27605f --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py @@ -0,0 +1,1761 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.language.v1beta2', + manifest={ + 'EncodingType', + 'Document', + 'Sentence', + 'Entity', + 'Token', + 'Sentiment', + 'PartOfSpeech', + 'DependencyEdge', + 'EntityMention', + 'TextSpan', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + }, +) + + +class EncodingType(proto.Enum): + r"""Represents the text encoding that the caller uses to process the + output. Providing an ``EncodingType`` is recommended because the API + provides the beginning offsets for various outputs, such as tokens + and mentions, and languages that natively use different text + encodings may access offsets differently. + + Values: + NONE (0): + If ``EncodingType`` is not specified, encoding-dependent + information (such as ``begin_offset``) will be set at + ``-1``. 
+ UTF8 (1): + Encoding-dependent information (such as ``begin_offset``) is + calculated based on the UTF-8 encoding of the input. C++ and + Go are examples of languages that use this encoding + natively. + UTF16 (2): + Encoding-dependent information (such as ``begin_offset``) is + calculated based on the UTF-16 encoding of the input. Java + and JavaScript are examples of languages that use this + encoding natively. + UTF32 (3): + Encoding-dependent information (such as ``begin_offset``) is + calculated based on the UTF-32 encoding of the input. Python + is an example of a language that uses this encoding + natively. + """ + NONE = 0 + UTF8 = 1 + UTF16 = 2 + UTF32 = 3 + + +class Document(proto.Message): + r"""Represents the input to API methods. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.language_v1beta2.types.Document.Type): + Required. If the type is not set or is ``TYPE_UNSPECIFIED``, + returns an ``INVALID_ARGUMENT`` error. + content (str): + The content of the input in string format. + Cloud audit logging exempt since it is based on + user data. + + This field is a member of `oneof`_ ``source``. + gcs_content_uri (str): + The Google Cloud Storage URI where the file content is + located. This URI must be of the form: + gs://bucket_name/object_name. For more details, see + https://cloud.google.com/storage/docs/reference-uris. NOTE: + Cloud Storage object versioning is not supported. + + This field is a member of `oneof`_ ``source``. + language (str): + The language of the document (if not specified, the language + is automatically detected). Both ISO and BCP-47 language + codes are accepted. `Language + Support `__ + lists currently supported languages for each API method. If + the language (either specified by the caller or + automatically detected) is not supported by the called API + method, an ``INVALID_ARGUMENT`` error is returned. + reference_web_uri (str): + The web URI where the document comes from. + This URI is not used for fetching the content, + but as a hint for analyzing the document. + boilerplate_handling (google.cloud.language_v1beta2.types.Document.BoilerplateHandling): + Indicates how detected boilerplate(e.g. + advertisements, copyright declarations, banners) + should be handled for this document. If not + specified, boilerplate will be treated the same + as content. + """ + class Type(proto.Enum): + r"""The document types enum. + + Values: + TYPE_UNSPECIFIED (0): + The content type is not specified. + PLAIN_TEXT (1): + Plain text + HTML (2): + HTML + """ + TYPE_UNSPECIFIED = 0 + PLAIN_TEXT = 1 + HTML = 2 + + class BoilerplateHandling(proto.Enum): + r"""Ways of handling boilerplate detected in the document + + Values: + BOILERPLATE_HANDLING_UNSPECIFIED (0): + The boilerplate handling is not specified. + SKIP_BOILERPLATE (1): + Do not analyze detected boilerplate. + Reference web URI is required for detecting + boilerplate. + KEEP_BOILERPLATE (2): + Treat boilerplate the same as content. 
+ """ + BOILERPLATE_HANDLING_UNSPECIFIED = 0 + SKIP_BOILERPLATE = 1 + KEEP_BOILERPLATE = 2 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + content: str = proto.Field( + proto.STRING, + number=2, + oneof='source', + ) + gcs_content_uri: str = proto.Field( + proto.STRING, + number=3, + oneof='source', + ) + language: str = proto.Field( + proto.STRING, + number=4, + ) + reference_web_uri: str = proto.Field( + proto.STRING, + number=5, + ) + boilerplate_handling: BoilerplateHandling = proto.Field( + proto.ENUM, + number=6, + enum=BoilerplateHandling, + ) + + +class Sentence(proto.Message): + r"""Represents a sentence in the input document. + + Attributes: + text (google.cloud.language_v1beta2.types.TextSpan): + The sentence text. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeSentiment][] or if + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] + is set to true, this field will contain the sentiment for + the sentence. + """ + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=2, + message='Sentiment', + ) + + +class Entity(proto.Message): + r"""Represents a phrase in the text that is a known entity, such + as a person, an organization, or location. The API associates + information, such as salience and mentions, with entities. + + Attributes: + name (str): + The representative name for the entity. + type_ (google.cloud.language_v1beta2.types.Entity.Type): + The entity type. + metadata (MutableMapping[str, str]): + Metadata associated with the entity. + + For most entity types, the metadata is a Wikipedia URL + (``wikipedia_url``) and Knowledge Graph MID (``mid``), if + they are available. For the metadata associated with other + entity types, see the Type table below. + salience (float): + The salience score associated with the entity in the [0, + 1.0] range. + + The salience score for an entity provides information about + the importance or centrality of that entity to the entire + document text. Scores closer to 0 are less salient, while + scores closer to 1.0 are highly salient. + mentions (MutableSequence[google.cloud.language_v1beta2.types.EntityMention]): + The mentions of this entity in the input + document. The API currently supports proper noun + mentions. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided + document. + """ + class Type(proto.Enum): + r"""The type of the entity. For most entity types, the associated + metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph + MID (``mid``). The table below lists the associated fields for + entities that have different metadata. 
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            PERSON (1):
+                Person
+            LOCATION (2):
+                Location
+            ORGANIZATION (3):
+                Organization
+            EVENT (4):
+                Event
+            WORK_OF_ART (5):
+                Artwork
+            CONSUMER_GOOD (6):
+                Consumer product
+            OTHER (7):
+                Other types of entities
+            PHONE_NUMBER (9):
+                Phone number
+
+                The metadata lists the phone number, formatted according to
+                local convention, plus whichever additional elements appear
+                in the text:
+
+                - ``number`` - the actual number, broken down into sections
+                  as per local convention
+                - ``national_prefix`` - country code, if detected
+                - ``area_code`` - region or area code, if detected
+                - ``extension`` - phone extension (to be dialed after
+                  connection), if detected
+            ADDRESS (10):
+                Address
+
+                The metadata identifies the street number and locality plus
+                whichever additional elements appear in the text:
+
+                - ``street_number`` - street number
+                - ``locality`` - city or town
+                - ``street_name`` - street/route name, if detected
+                - ``postal_code`` - postal code, if detected
+                - ``country`` - country, if detected
+                - ``broad_region`` - administrative area, such as the
+                  state, if detected
+                - ``narrow_region`` - smaller administrative area, such as
+                  county, if detected
+                - ``sublocality`` - used in Asian addresses to demark a
+                  district within a city, if detected
+            DATE (11):
+                Date
+
+                The metadata identifies the components of the date:
+
+                - ``year`` - four digit year, if detected
+                - ``month`` - two digit month number, if detected
+                - ``day`` - two digit day number, if detected
+            NUMBER (12):
+                Number
+                The metadata is the number itself.
+            PRICE (13):
+                Price
+
+                The metadata identifies the ``value`` and ``currency``.
+        """
+        UNKNOWN = 0
+        PERSON = 1
+        LOCATION = 2
+        ORGANIZATION = 3
+        EVENT = 4
+        WORK_OF_ART = 5
+        CONSUMER_GOOD = 6
+        OTHER = 7
+        PHONE_NUMBER = 9
+        ADDRESS = 10
+        DATE = 11
+        NUMBER = 12
+        PRICE = 13
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Type,
+    )
+    metadata: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    salience: float = proto.Field(
+        proto.FLOAT,
+        number=4,
+    )
+    mentions: MutableSequence['EntityMention'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message='EntityMention',
+    )
+    sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='Sentiment',
+    )
+
+
+class Token(proto.Message):
+    r"""Represents the smallest syntactic building block of the text.
+
+    Attributes:
+        text (google.cloud.language_v1beta2.types.TextSpan):
+            The token text.
+        part_of_speech (google.cloud.language_v1beta2.types.PartOfSpeech):
+            Parts of speech tag for this token.
+        dependency_edge (google.cloud.language_v1beta2.types.DependencyEdge):
+            Dependency tree parse for this token.
+        lemma (str):
+            `Lemma `__
+            of the token.
+    """
+
+    text: 'TextSpan' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='TextSpan',
+    )
+    part_of_speech: 'PartOfSpeech' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='PartOfSpeech',
+    )
+    dependency_edge: 'DependencyEdge' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='DependencyEdge',
+    )
+    lemma: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class Sentiment(proto.Message):
+    r"""Represents the feeling associated with the entire text or
+    entities in the text.
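The entity ``metadata`` map carries the type-specific keys listed above (for example ``wikipedia_url`` and ``mid``, or the phone-number and address components). A hedged sketch of reading them from an ``AnalyzeEntities`` call, with an assumed input text::

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()

    document = language_v1beta2.Document(
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        content="Call +1 650-253-0000 to reach Google in Mountain View.",
    )
    response = client.analyze_entities(
        request={
            "document": document,
            "encoding_type": language_v1beta2.EncodingType.UTF8,
        }
    )

    for entity in response.entities:
        print(entity.name, entity.type_.name, round(entity.salience, 3))
        for key, value in entity.metadata.items():
            # e.g. wikipedia_url/mid, or number/area_code for PHONE_NUMBER
            print(f"    {key}: {value}")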
+ Next ID: 6 + + Attributes: + magnitude (float): + A non-negative number in the [0, +inf) range, which + represents the absolute magnitude of sentiment regardless of + score (positive or negative). + score (float): + Sentiment score between -1.0 (negative + sentiment) and 1.0 (positive sentiment). + """ + + magnitude: float = proto.Field( + proto.FLOAT, + number=2, + ) + score: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class PartOfSpeech(proto.Message): + r"""Represents part of speech information for a token. + + Attributes: + tag (google.cloud.language_v1beta2.types.PartOfSpeech.Tag): + The part of speech tag. + aspect (google.cloud.language_v1beta2.types.PartOfSpeech.Aspect): + The grammatical aspect. + case (google.cloud.language_v1beta2.types.PartOfSpeech.Case): + The grammatical case. + form (google.cloud.language_v1beta2.types.PartOfSpeech.Form): + The grammatical form. + gender (google.cloud.language_v1beta2.types.PartOfSpeech.Gender): + The grammatical gender. + mood (google.cloud.language_v1beta2.types.PartOfSpeech.Mood): + The grammatical mood. + number (google.cloud.language_v1beta2.types.PartOfSpeech.Number): + The grammatical number. + person (google.cloud.language_v1beta2.types.PartOfSpeech.Person): + The grammatical person. + proper (google.cloud.language_v1beta2.types.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (google.cloud.language_v1beta2.types.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (google.cloud.language_v1beta2.types.PartOfSpeech.Tense): + The grammatical tense. + voice (google.cloud.language_v1beta2.types.PartOfSpeech.Voice): + The grammatical voice. + """ + class Tag(proto.Enum): + r"""The part of speech tags enum. + + Values: + UNKNOWN (0): + Unknown + ADJ (1): + Adjective + ADP (2): + Adposition (preposition and postposition) + ADV (3): + Adverb + CONJ (4): + Conjunction + DET (5): + Determiner + NOUN (6): + Noun (common and proper) + NUM (7): + Cardinal number + PRON (8): + Pronoun + PRT (9): + Particle or other function word + PUNCT (10): + Punctuation + VERB (11): + Verb (all tenses and modes) + X (12): + Other: foreign words, typos, abbreviations + AFFIX (13): + Affix + """ + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(proto.Enum): + r"""The characteristic of a verb that expresses time flow during + an event. + + Values: + ASPECT_UNKNOWN (0): + Aspect is not applicable in the analyzed + language or is not predicted. + PERFECTIVE (1): + Perfective + IMPERFECTIVE (2): + Imperfective + PROGRESSIVE (3): + Progressive + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(proto.Enum): + r"""The grammatical function performed by a noun or pronoun in a + phrase, clause, or sentence. In some languages, other parts of + speech, such as adjective and determiner, take case inflection + in agreement with the noun. + + Values: + CASE_UNKNOWN (0): + Case is not applicable in the analyzed + language or is not predicted. 
+ ACCUSATIVE (1): + Accusative + ADVERBIAL (2): + Adverbial + COMPLEMENTIVE (3): + Complementive + DATIVE (4): + Dative + GENITIVE (5): + Genitive + INSTRUMENTAL (6): + Instrumental + LOCATIVE (7): + Locative + NOMINATIVE (8): + Nominative + OBLIQUE (9): + Oblique + PARTITIVE (10): + Partitive + PREPOSITIONAL (11): + Prepositional + REFLEXIVE_CASE (12): + Reflexive + RELATIVE_CASE (13): + Relative + VOCATIVE (14): + Vocative + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(proto.Enum): + r"""Depending on the language, Form can be categorizing different + forms of verbs, adjectives, adverbs, etc. For example, + categorizing inflected endings of verbs and adjectives or + distinguishing between short and long forms of adjectives and + participles + + Values: + FORM_UNKNOWN (0): + Form is not applicable in the analyzed + language or is not predicted. + ADNOMIAL (1): + Adnomial + AUXILIARY (2): + Auxiliary + COMPLEMENTIZER (3): + Complementizer + FINAL_ENDING (4): + Final ending + GERUND (5): + Gerund + REALIS (6): + Realis + IRREALIS (7): + Irrealis + SHORT (8): + Short form + LONG (9): + Long form + ORDER (10): + Order form + SPECIFIC (11): + Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(proto.Enum): + r"""Gender classes of nouns reflected in the behaviour of + associated words. + + Values: + GENDER_UNKNOWN (0): + Gender is not applicable in the analyzed + language or is not predicted. + FEMININE (1): + Feminine + MASCULINE (2): + Masculine + NEUTER (3): + Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(proto.Enum): + r"""The grammatical feature of verbs, used for showing modality + and attitude. + + Values: + MOOD_UNKNOWN (0): + Mood is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_MOOD (1): + Conditional + IMPERATIVE (2): + Imperative + INDICATIVE (3): + Indicative + INTERROGATIVE (4): + Interrogative + JUSSIVE (5): + Jussive + SUBJUNCTIVE (6): + Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(proto.Enum): + r"""Count distinctions. + + Values: + NUMBER_UNKNOWN (0): + Number is not applicable in the analyzed + language or is not predicted. + SINGULAR (1): + Singular + PLURAL (2): + Plural + DUAL (3): + Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(proto.Enum): + r"""The distinction between the speaker, second person, third + person, etc. + + Values: + PERSON_UNKNOWN (0): + Person is not applicable in the analyzed + language or is not predicted. + FIRST (1): + First + SECOND (2): + Second + THIRD (3): + Third + REFLEXIVE_PERSON (4): + Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(proto.Enum): + r"""This category shows if the token is part of a proper name. + + Values: + PROPER_UNKNOWN (0): + Proper is not applicable in the analyzed + language or is not predicted. 
+ PROPER (1): + Proper + NOT_PROPER (2): + Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(proto.Enum): + r"""Reciprocal features of a pronoun. + + Values: + RECIPROCITY_UNKNOWN (0): + Reciprocity is not applicable in the analyzed + language or is not predicted. + RECIPROCAL (1): + Reciprocal + NON_RECIPROCAL (2): + Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(proto.Enum): + r"""Time reference. + + Values: + TENSE_UNKNOWN (0): + Tense is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_TENSE (1): + Conditional + FUTURE (2): + Future + PAST (3): + Past + PRESENT (4): + Present + IMPERFECT (5): + Imperfect + PLUPERFECT (6): + Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(proto.Enum): + r"""The relationship between the action that a verb expresses and + the participants identified by its arguments. + + Values: + VOICE_UNKNOWN (0): + Voice is not applicable in the analyzed + language or is not predicted. + ACTIVE (1): + Active + CAUSATIVE (2): + Causative + PASSIVE (3): + Passive + """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + tag: Tag = proto.Field( + proto.ENUM, + number=1, + enum=Tag, + ) + aspect: Aspect = proto.Field( + proto.ENUM, + number=2, + enum=Aspect, + ) + case: Case = proto.Field( + proto.ENUM, + number=3, + enum=Case, + ) + form: Form = proto.Field( + proto.ENUM, + number=4, + enum=Form, + ) + gender: Gender = proto.Field( + proto.ENUM, + number=5, + enum=Gender, + ) + mood: Mood = proto.Field( + proto.ENUM, + number=6, + enum=Mood, + ) + number: Number = proto.Field( + proto.ENUM, + number=7, + enum=Number, + ) + person: Person = proto.Field( + proto.ENUM, + number=8, + enum=Person, + ) + proper: Proper = proto.Field( + proto.ENUM, + number=9, + enum=Proper, + ) + reciprocity: Reciprocity = proto.Field( + proto.ENUM, + number=10, + enum=Reciprocity, + ) + tense: Tense = proto.Field( + proto.ENUM, + number=11, + enum=Tense, + ) + voice: Voice = proto.Field( + proto.ENUM, + number=12, + enum=Voice, + ) + + +class DependencyEdge(proto.Message): + r"""Represents dependency parse tree information for a token. + + Attributes: + head_token_index (int): + Represents the head of this token in the dependency tree. + This is the index of the token which has an arc going to + this token. The index is the position of the token in the + array of tokens returned by the API method. If this token is + a root token, then the ``head_token_index`` is its own + index. + label (google.cloud.language_v1beta2.types.DependencyEdge.Label): + The parse label for the token. + """ + class Label(proto.Enum): + r"""The parse label enum for the token. 
+ + Values: + UNKNOWN (0): + Unknown + ABBREV (1): + Abbreviation modifier + ACOMP (2): + Adjectival complement + ADVCL (3): + Adverbial clause modifier + ADVMOD (4): + Adverbial modifier + AMOD (5): + Adjectival modifier of an NP + APPOS (6): + Appositional modifier of an NP + ATTR (7): + Attribute dependent of a copular verb + AUX (8): + Auxiliary (non-main) verb + AUXPASS (9): + Passive auxiliary + CC (10): + Coordinating conjunction + CCOMP (11): + Clausal complement of a verb or adjective + CONJ (12): + Conjunct + CSUBJ (13): + Clausal subject + CSUBJPASS (14): + Clausal passive subject + DEP (15): + Dependency (unable to determine) + DET (16): + Determiner + DISCOURSE (17): + Discourse + DOBJ (18): + Direct object + EXPL (19): + Expletive + GOESWITH (20): + Goes with (part of a word in a text not well + edited) + IOBJ (21): + Indirect object + MARK (22): + Marker (word introducing a subordinate + clause) + MWE (23): + Multi-word expression + MWV (24): + Multi-word verbal expression + NEG (25): + Negation modifier + NN (26): + Noun compound modifier + NPADVMOD (27): + Noun phrase used as an adverbial modifier + NSUBJ (28): + Nominal subject + NSUBJPASS (29): + Passive nominal subject + NUM (30): + Numeric modifier of a noun + NUMBER (31): + Element of compound number + P (32): + Punctuation mark + PARATAXIS (33): + Parataxis relation + PARTMOD (34): + Participial modifier + PCOMP (35): + The complement of a preposition is a clause + POBJ (36): + Object of a preposition + POSS (37): + Possession modifier + POSTNEG (38): + Postverbal negative particle + PRECOMP (39): + Predicate complement + PRECONJ (40): + Preconjunt + PREDET (41): + Predeterminer + PREF (42): + Prefix + PREP (43): + Prepositional modifier + PRONL (44): + The relationship between a verb and verbal + morpheme + PRT (45): + Particle + PS (46): + Associative or possessive marker + QUANTMOD (47): + Quantifier phrase modifier + RCMOD (48): + Relative clause modifier + RCMODREL (49): + Complementizer in relative clause + RDROP (50): + Ellipsis without a preceding predicate + REF (51): + Referent + REMNANT (52): + Remnant + REPARANDUM (53): + Reparandum + ROOT (54): + Root + SNUM (55): + Suffix specifying a unit of number + SUFF (56): + Suffix + TMOD (57): + Temporal modifier + TOPIC (58): + Topic marker + VMOD (59): + Clause headed by an infinite form of the verb + that modifies a noun + VOCATIVE (60): + Vocative + XCOMP (61): + Open clausal complement + SUFFIX (62): + Name suffix + TITLE (63): + Name title + ADVPHMOD (64): + Adverbial phrase modifier + AUXCAUS (65): + Causative auxiliary + AUXVV (66): + Helper auxiliary + DTMOD (67): + Rentaishi (Prenominal modifier) + FOREIGN (68): + Foreign words + KW (69): + Keyword + LIST (70): + List for chains of comparable items + NOMC (71): + Nominalized clause + NOMCSUBJ (72): + Nominalized clausal subject + NOMCSUBJPASS (73): + Nominalized clausal passive + NUMC (74): + Compound of numeric modifier + COP (75): + Copula + DISLOCATED (76): + Dislocated relation (for fronted/topicalized + elements) + ASP (77): + Aspect marker + GMOD (78): + Genitive modifier + GOBJ (79): + Genitive object + INFMOD (80): + Infinitival modifier + MES (81): + Measure + NCOMP (82): + Nominal complement of a noun + """ + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + 
MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index: int = proto.Field( + proto.INT32, + number=1, + ) + label: Label = proto.Field( + proto.ENUM, + number=2, + enum=Label, + ) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. + + Attributes: + text (google.cloud.language_v1beta2.types.TextSpan): + The mention text. + type_ (google.cloud.language_v1beta2.types.EntityMention.Type): + The type of the entity mention. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. + """ + class Type(proto.Enum): + r"""The supported types of mentions. + + Values: + TYPE_UNKNOWN (0): + Unknown + PROPER (1): + Proper name + COMMON (2): + Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=3, + message='Sentiment', + ) + + +class TextSpan(proto.Message): + r"""Represents an output piece of text. + + Attributes: + content (str): + The content of the output text. + begin_offset (int): + The API calculates the beginning offset of the content in + the original document according to the + [EncodingType][google.cloud.language.v1beta2.EncodingType] + specified in the API request. + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + begin_offset: int = proto.Field( + proto.INT32, + number=2, + ) + + +class ClassificationCategory(proto.Message): + r"""Represents a category returned from the text classifier. + + Attributes: + name (str): + The name of the category representing the + document. + confidence (float): + The classifier's confidence of the category. + Number represents how certain the classifier is + that this category represents the given text. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class ClassificationModelOptions(proto.Message): + r"""Model options available for classification requests. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + v1_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V1Model): + Setting this field will use the V1 model and + V1 content categories version. The V1 model is a + legacy model; support for this will be + discontinued in the future. + + This field is a member of `oneof`_ ``model_type``. + v2_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model): + Setting this field will use the V2 model with + the appropriate content categories version. The + V2 model is a better performing model. + + This field is a member of `oneof`_ ``model_type``. + """ + + class V1Model(proto.Message): + r"""Options for the V1 model. + """ + + class V2Model(proto.Message): + r"""Options for the V2 model. + + Attributes: + content_categories_version (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): + The content categories used for + classification. + """ + class ContentCategoriesVersion(proto.Enum): + r"""The content categories used for classification. + + Values: + CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): + If ``ContentCategoriesVersion`` is not specified, this + option will default to ``V1``. + V1 (1): + Legacy content categories of our initial + launch in 2017. + V2 (2): + Updated content categories in 2022. + """ + CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 + V1 = 1 + V2 = 2 + + content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( + proto.ENUM, + number=1, + enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', + ) + + v1_model: V1Model = proto.Field( + proto.MESSAGE, + number=1, + oneof='model_type', + message=V1Model, + ) + v2_model: V2Model = proto.Field( + proto.MESSAGE, + number=2, + oneof='model_type', + message=V2Model, + ) + + +class AnalyzeSentimentRequest(proto.Message): + r"""The sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets for the sentence + sentiment. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSentimentResponse(proto.Message): + r"""The sentiment analysis response message. + + Attributes: + document_sentiment (google.cloud.language_v1beta2.types.Sentiment): + The overall sentiment of the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + The sentiment for all the sentences in the + document. + """ + + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=1, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Sentence', + ) + + +class AnalyzeEntitySentimentRequest(proto.Message): + r"""The entity-level sentiment analysis request message. 
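``score`` carries polarity while ``magnitude`` accumulates emotional weight across the whole text, so a near-zero score with a large magnitude usually indicates mixed rather than absent sentiment. A hedged sketch of reading both, plus the per-sentence breakdown (input text is an assumed example)::

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()

    document = language_v1beta2.Document(
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        content="The food was great. The service was painfully slow.",
    )
    response = client.analyze_sentiment(
        request={
            "document": document,
            "encoding_type": language_v1beta2.EncodingType.UTF8,
        }
    )

    overall = response.document_sentiment
    print(f"document score={overall.score:.2f} magnitude={overall.magnitude:.2f}")
    for sentence in response.sentences:
        print(sentence.text.content, f"{sentence.sentiment.score:.2f}")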
+ + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + Sentences in the input document. + tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. 
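Each ``Token`` returned by ``AnalyzeSyntax`` bundles the ``PartOfSpeech``, ``DependencyEdge``, and lemma information defined earlier, and ``head_token_index`` points back into the same token list. A hedged sketch of walking the parse (input text is an assumed example)::

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()

    document = language_v1beta2.Document(
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        content="The quick brown fox jumps over the lazy dog.",
    )
    response = client.analyze_syntax(
        request={
            "document": document,
            "encoding_type": language_v1beta2.EncodingType.UTF8,
        }
    )

    tokens = list(response.tokens)
    for token in tokens:
        head = tokens[token.dependency_edge.head_token_index].text.content
        print(
            token.text.content,
            token.part_of_speech.tag.name,      # e.g. NOUN, VERB
            token.dependency_edge.label.name,   # e.g. NSUBJ, ROOT
            "->",
            head,
        )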
See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + language: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ClassifyTextRequest(proto.Message): + r"""The document classification request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): + Model options to use for classification. + Defaults to v1 options if not specified. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='ClassificationModelOptions', + ) + + +class ClassifyTextResponse(proto.Message): + r"""The document classification response message. + + Attributes: + categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Categories representing the input document. + """ + + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ClassificationCategory', + ) + + +class ModerateTextRequest(proto.Message): + r"""The document moderation request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + + +class ModerateTextResponse(proto.Message): + r"""The document moderation response message. + + Attributes: + moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Harmful and sensitive categories representing + the input document. + """ + + moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ClassificationCategory', + ) + + +class AnnotateTextRequest(proto.Message): + r"""The request message for the text annotation API, which can + perform multiple analysis types (sentiment, entities, and + syntax) in one call. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): + Required. The enabled features. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + class Features(proto.Message): + r"""All available features for sentiment, syntax, and semantic + analysis. Setting each one to true will enable that specific + analysis for the input. Next ID: 12 + + Attributes: + extract_syntax (bool): + Extract syntax information. + extract_entities (bool): + Extract entities. + extract_document_sentiment (bool): + Extract document-level sentiment. + extract_entity_sentiment (bool): + Extract entities and their associated + sentiment. + classify_text (bool): + Classify the full document into categories. If this is true, + the API will use the default model which classifies into a + `predefined + taxonomy `__. + moderate_text (bool): + Moderate the document for harmful and + sensitive categories. 
+ classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): + The model options to use for classification. Defaults to v1 + options if not specified. Only used if ``classify_text`` is + set to true. + """ + + extract_syntax: bool = proto.Field( + proto.BOOL, + number=1, + ) + extract_entities: bool = proto.Field( + proto.BOOL, + number=2, + ) + extract_document_sentiment: bool = proto.Field( + proto.BOOL, + number=3, + ) + extract_entity_sentiment: bool = proto.Field( + proto.BOOL, + number=4, + ) + classify_text: bool = proto.Field( + proto.BOOL, + number=6, + ) + moderate_text: bool = proto.Field( + proto.BOOL, + number=11, + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=10, + message='ClassificationModelOptions', + ) + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + features: Features = proto.Field( + proto.MESSAGE, + number=2, + message=Features, + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=3, + enum='EncodingType', + ) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (google.cloud.language_v1beta2.types.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Categories identified in the input document. + moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Harmful and sensitive categories identified + in the input document. 
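A single ``AnnotateText`` request can enable several of the features above at once; the response then populates the matching fields and leaves the rest empty. A hedged sketch combining entity extraction, document sentiment, V2-model classification, and the newly added ``moderate_text`` feature (input text is a placeholder)::

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()

    document = language_v1beta2.Document(
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        content="Text to analyze.",
    )
    features = language_v1beta2.AnnotateTextRequest.Features(
        extract_entities=True,
        extract_document_sentiment=True,
        classify_text=True,
        moderate_text=True,
        classification_model_options=language_v1beta2.ClassificationModelOptions(
            v2_model=language_v1beta2.ClassificationModelOptions.V2Model(),
        ),
    )

    response = client.annotate_text(
        request={"document": document, "features": features}
    )

    print(f"sentiment score: {response.document_sentiment.score:.2f}")
    for category in response.categories:
        print("category:", category.name, category.confidence)
    for category in response.moderation_categories:
        print("moderation:", category.name, category.confidence)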
+ """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Entity', + ) + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=4, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=5, + ) + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ClassificationCategory', + ) + moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='ClassificationCategory', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta2/mypy.ini b/owl-bot-staging/v1beta2/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v1beta2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1beta2/noxfile.py b/owl-bot-staging/v1beta2/noxfile.py new file mode 100644 index 00000000..95cd6c8b --- /dev/null +++ b/owl-bot-staging/v1beta2/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/language_v1beta2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py new file mode 100644 index 00000000..ef2d4a6d --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py new file mode 100644 index 00000000..b8c2694b --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py new file mode 100644 index 00000000..818d4209 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py new file mode 100644 index 00000000..cabc3ff5 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py new file mode 100644 index 00000000..b60e606a --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py new file mode 100644 index 00000000..df735913 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py new file mode 100644 index 00000000..e42a0728 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py new file mode 100644 index 00000000..f9ed77cc --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py new file mode 100644 index 00000000..5b17e2b1 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnnotateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
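+#   (e.g. AnnotateText usually needs a features selection to do useful
+#   work, such as
+#   features=language_v1beta2.AnnotateTextRequest.Features(extract_syntax=True)
+#   on the request -- an assumed, illustrative value.)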
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py new file mode 100644 index 00000000..701c94e5 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnnotateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
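+#   (e.g. as in the async variant, populate request.features -- for
+#   instance with extract_entities=True or extract_document_sentiment=True
+#   -- or the annotation response will carry little of interest.)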
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py new file mode 100644 index 00000000..94b5ebcc --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ClassifyText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
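+#   (e.g. ClassifyText generally needs more input than a short
+#   placeholder: the classifier expects a document with enough tokens to
+#   categorize, so substitute a real paragraph for "content_value".)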
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py new file mode 100644 index 00000000..f9415093 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ClassifyText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
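+#   (e.g. v1beta2's ClassifyTextRequest additionally accepts
+#   classification_model_options -- see the keyword map in
+#   scripts/fixup_language_v1beta2_keywords.py later in this change.)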
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py new file mode 100644 index 00000000..d8385285 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ModerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
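+#   (e.g. rather than printing the whole response, iterate
+#   response.moderation_categories, whose entries pair a category name
+#   with a confidence score -- an illustrative handling choice.)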
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ModerateText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py new file mode 100644 index 00000000..78d11521 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ModerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
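+#   (e.g. each entry in response.moderation_categories carries name and
+#   confidence fields; the exact category set may change while the API is
+#   in beta.)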
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ModerateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json new file mode 100644 index 00000000..fb6633f2 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json @@ -0,0 +1,1190 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.language.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-cloud-language", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1beta2_generated_language_service_analyze_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entities", + "method": { + "fullName": 
"google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1beta2_generated_language_service_analyze_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1beta2_generated_language_service_analyze_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1beta2_generated_language_service_analyze_syntax_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_syntax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1beta2_generated_language_service_analyze_syntax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_syntax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1beta2_generated_language_service_annotate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_annotate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1beta2_generated_language_service_annotate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_annotate_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": 
"google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1beta2_generated_language_service_classify_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_classify_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1beta2_generated_language_service_classify_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_classify_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": 
"google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py new file mode 100644 index 00000000..10fa218c --- /dev/null +++ b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', 'classification_model_options', ), + 'moderate_text': ('document', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
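+        # (relative_to/joinpath below mirror the input tree under out_dir,
+        # so nested sample directories keep their layout.)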
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta2/setup.py b/owl-bot-staging/v1beta2/setup.py new file mode 100644 index 00000000..047e5bce --- /dev/null +++ b/owl-bot-staging/v1beta2/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-language' + + +description = "Google Cloud Language API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-language" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
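+# (These entries are left unpinned so unit tests resolve to current
+# releases; constraints-3.7.txt below instead pins each dependency to its
+# setup.py lower bound.)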
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/tests/__init__.py b/owl-bot-staging/v1beta2/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/__init__.py b/owl-bot-staging/v1beta2/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py new file mode 100644 index 00000000..3e0b7671 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -0,0 +1,4070 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.language_v1beta2.services.language_service import LanguageServiceAsyncClient
+from google.cloud.language_v1beta2.services.language_service import LanguageServiceClient
+from google.cloud.language_v1beta2.services.language_service import transports
+from google.cloud.language_v1beta2.types import language_service
+from google.oauth2 import service_account
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert LanguageServiceClient._get_default_mtls_endpoint(None) is None
+    assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (LanguageServiceClient, "grpc"),
+    (LanguageServiceAsyncClient, "grpc_asyncio"),
+    (LanguageServiceClient, "rest"),
+])
+def test_language_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'language.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://language.googleapis.com'
+        )
+
+
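+# Illustrative sketch (added commentary, not generator output): the factory
+# methods exercised above are how application code typically constructs a
+# client, after which the newly added ModerateText RPC can be called. The
+# key path below is hypothetical:
+#
+#   client = LanguageServiceClient.from_service_account_file("key.json")
+#   document = language_service.Document(
+#       content="Text to moderate.",
+#       type_=language_service.Document.Type.PLAIN_TEXT,
+#   )
+#   response = client.moderate_text(document=document)
+#   for category in response.moderation_categories:
+#       print(category.name, category.confidence)
+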
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LanguageServiceGrpcTransport, "grpc"), + (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + +def test_language_service_client_get_transport_class(): + transport = LanguageServiceClient.get_transport_class() + available_transports = [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, + ] + assert transport in available_transports + + transport = LanguageServiceClient.get_transport_class("grpc") + assert transport == transports.LanguageServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"),
+    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"),
+    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"),
+    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient))
+@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
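+    # (Illustrative note: for this service the autoswitch takes the default
+    # endpoint "language.googleapis.com" to "language.mtls.googleapis.com",
+    # following the _get_default_mtls_endpoint convention tested above.)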
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LanguageServiceClient, LanguageServiceAsyncClient +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
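+    # (Illustrative note: "1" and "2" below are placeholder scopes; real code
+    # would pass OAuth scopes such as
+    # "https://www.googleapis.com/auth/cloud-language".)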
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), +]) +def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_language_service_client_client_options_from_dict(): + with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
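+    # The block below stubs google.auth.load_credentials_from_file and
+    # grpc_helpers.create_channel so it can assert that the file-based
+    # credentials, rather than ADC, are what reach channel creation.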
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSentimentRequest, + dict, +]) +def test_analyze_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language='language_value', + ) + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
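+        # (FakeUnaryUnaryCall wraps the response in an awaitable, mimicking
+        # the call object a real async unary-unary gRPC stub returns.)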
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse(
+            language='language_value',
+        ))
+        response = await client.analyze_sentiment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeSentimentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSentimentResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_sentiment_async_from_dict():
+    await test_analyze_sentiment_async(request_type=dict)
+
+
+def test_analyze_sentiment_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSentimentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_sentiment_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_sentiment(
+            language_service.AnalyzeSentimentRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_sentiment_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse(
+            language='language_value',
+        ))
+        response = await client.analyze_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeEntitiesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_entities_async_from_dict():
+    await test_analyze_entities_async(request_type=dict)
+
+
+def test_analyze_entities_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeEntitiesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_entities(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_entities_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_entities(
+            language_service.AnalyzeEntitiesRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_entities_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entities(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + response = client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + client.analyze_entity_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
+            language='language_value',
+        ))
+        response = await client.analyze_entity_sentiment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_async_from_dict():
+    await test_analyze_entity_sentiment_async(request_type=dict)
+
+
+def test_analyze_entity_sentiment_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeEntitySentimentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_entity_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_entity_sentiment_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_entity_sentiment(
+            language_service.AnalyzeEntitySentimentRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entity_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + response = client.analyze_syntax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSyntaxRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + client.analyze_syntax() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSyntaxRequest() + +@pytest.mark.asyncio +async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_syntax), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse( + language='language_value', + )) + response = await client.analyze_syntax(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async_from_dict():
+    await test_analyze_syntax_async(request_type=dict)
+
+
+def test_analyze_syntax_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_syntax(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_syntax_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_syntax(
+            language_service.AnalyzeSyntaxRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_syntax(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
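+    # (The request object and flattened fields are mutually exclusive, so the
+    # client raises ValueError instead of guessing which values should win.)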
+ with pytest.raises(ValueError): + await client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ClassifyTextRequest, + dict, +]) +def test_classify_text(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.classify_text), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ClassifyTextResponse( + ) + response = client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ClassifyTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.classify_text), + '__call__') as call: + client.classify_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ClassifyTextRequest() + +@pytest.mark.asyncio +async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.classify_text), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse( + )) + response = await client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ClassifyTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +@pytest.mark.asyncio +async def test_classify_text_async_from_dict(): + await test_classify_text_async(request_type=dict) + + +def test_classify_text_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.classify_text), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = language_service.ClassifyTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.classify_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+
+def test_classify_text_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.classify_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    language_service.ModerateTextRequest,
+    dict,
+])
+def test_moderate_text(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ModerateTextResponse(
+        )
+        response = client.moderate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.moderate_text), + '__call__') as call: + client.moderate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + +@pytest.mark.asyncio +async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.moderate_text), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse( + )) + response = await client.moderate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ModerateTextResponse) + + +@pytest.mark.asyncio +async def test_moderate_text_async_from_dict(): + await test_moderate_text_async(request_type=dict) + + +def test_moderate_text_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.moderate_text), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ModerateTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.moderate_text( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + + +def test_moderate_text_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.moderate_text(
+            language_service.ModerateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.moderate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.moderate_text(
+            language_service.ModerateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.AnnotateTextRequest,
+  dict,
+])
+def test_annotate_text(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse(
+            language='language_value',
+        )
+        response = client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, language_service.AnnotateTextResponse)
+        assert response.language == 'language_value'
+
+
+def test_annotate_text_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
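+    # Patching __call__ on the multicallable's type intercepts the invocation
+    # for this transport method without touching the channel itself.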
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        client.annotate_text()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+@pytest.mark.asyncio
+async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
+            language='language_value',
+        ))
+        response = await client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, language_service.AnnotateTextResponse)
+        assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_async_from_dict():
+    await test_annotate_text_async(request_type=dict)
+
+
+def test_annotate_text_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.annotate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_annotate_text_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.AnalyzeSentimentRequest,
+  dict,
+])
+def test_analyze_sentiment_rest(request_type):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = language_service.AnalyzeSentimentResponse(
+            language='language_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.analyze_sentiment(request)
+
+        # Establish that the response is the type that we expect.
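+        # The mocked session returned JSON, which the REST transport parses
+        # back into a proto message; the assertions below therefore exercise
+        # the full serialize/deserialize round trip.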
+ assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
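+            # The stubbed transcode result below uses a placeholder URI; the
+            # real output would be derived from this method's http_options.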
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_sentiment(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_sentiment_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_sentiment_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
+
+        request = language_service.AnalyzeSentimentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeSentimentResponse()
+
+        client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
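+    # google.api_core maps HTTP 400 responses to core_exceptions.BadRequest,
+    # which is the exception expected here.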
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_sentiment(request) + + +def test_analyze_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeSentiment" % client.transport._host, args[1]) + + +def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entities(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_entities(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_entities_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_entities._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entities_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
+
+        request = language_service.AnalyzeEntitiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeEntitiesResponse()
+
+        client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) + + +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeEntities" % client.transport._host, args[1]) + + +def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
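+        # Only the fields asserted on below need to be set; unset proto3
+        # fields fall back to their default values in the mocked response.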
+ return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_entity_sentiment(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_entity_sentiment_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
+
+        request = language_service.AnalyzeEntitySentimentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeEntitySentimentResponse()
+
+        client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeEntitySentiment" % client.transport._host, args[1]) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_syntax(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_syntax_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_syntax._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_syntax_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
+
+        request = language_service.AnalyzeSyntaxRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeSyntaxResponse()
+
+        client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_syntax(request) + + +def test_analyze_syntax_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_syntax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeSyntax" % client.transport._host, args[1]) + + +def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ClassifyTextRequest, + dict, +]) +def test_classify_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.classify_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.classify_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_classify_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.classify_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_classify_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
+
+        request = language_service.ClassifyTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.ClassifyTextResponse()
+
+        client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:classifyText" % client.transport._host, args[1]) + + +def test_classify_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ModerateTextRequest, + dict, +]) +def test_moderate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.moderate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.ModerateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.moderate_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_moderate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.moderate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_moderate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse())
+
+        request = language_service.ModerateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.ModerateTextResponse()
+
+        client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.moderate_text(request) + + +def test_moderate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.moderate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:moderateText" % client.transport._host, args[1]) + + +def test_moderate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_moderate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnnotateTextRequest, + dict, +]) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == 'language_value' + + +def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
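+            # As in the moderate_text test above, route the request message
+            # through both the query params and the body.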
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.annotate_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_annotate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.annotate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", "features", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+    )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
+
+        request = language_service.AnnotateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnnotateTextResponse()
+
+        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
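+    # As above, a mocked 400 response surfaces as core_exceptions.BadRequest.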
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:annotateText" % client.transport._host, args[1]) + + +def test_annotate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
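+    # 'moderate_text' below is the RPC added by this change; the abstract
+    # base transport must treat it exactly like the pre-existing methods.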
+ methods = ( + 'analyze_sentiment', + 'analyze_entities', + 'analyze_entity_sentiment', + 'analyze_syntax', + 'classify_text', + 'moderate_text', + 'annotate_text', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_language_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport() + adc.assert_called_once() + + +def test_language_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LanguageServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_language_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
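+    # ADC here means Application Default Credentials, resolved through
+    # google.auth.default().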
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, + ], +) +def test_language_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LanguageServiceGrpcTransport, grpc_helpers), + (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_language_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
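+    # client_cert_source_for_mtls is a callback that returns a
+    # (certificate_chain, private_key) pair of bytes; it should be handed
+    # straight to grpc.ssl_channel_credentials.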
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_language_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.LanguageServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_no_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_with_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com:8000'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_language_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = LanguageServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = LanguageServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.analyze_sentiment._session
+    session2 = client2.transport.analyze_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entities._session
+    session2 = client2.transport.analyze_entities._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entity_sentiment._session
+    session2 = client2.transport.analyze_entity_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_syntax._session
+    session2 = client2.transport.analyze_syntax._session
+    assert session1 != session2
+    session1 = client1.transport.classify_text._session
+    session2 = client2.transport.classify_text._session
+    assert session1 != session2
+    session1 = client1.transport.moderate_text._session
+    session2 = client2.transport.moderate_text._session
+    assert session1 != session2
+    session1 = client1.transport.annotate_text._session
+    session2 = client2.transport.annotate_text._session
+    assert session1 != session2
+
+
+def test_language_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LanguageServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_language_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LanguageServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LanguageServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LanguageServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LanguageServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LanguageServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LanguageServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LanguageServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LanguageServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LanguageServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LanguageServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LanguageServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LanguageServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = LanguageServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 251bb7547334855ec50fa55a0bd1780b0def1636 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 24 May 2023 22:50:16 +0000 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/language_v1beta2/__init__.py | 4 + .../language_v1beta2/gapic_metadata.json | 15 + .../services/language_service/async_client.py | 98 + .../services/language_service/client.py | 98 + .../language_service/transports/base.py | 17 + .../language_service/transports/grpc.py | 29 + .../transports/grpc_asyncio.py | 30 + .../language_service/transports/rest.py | 139 + .../cloud/language_v1beta2/types/__init__.py | 4 + .../types/language_service.py | 59 +- owl-bot-staging/v1/.coveragerc | 13 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - owl-bot-staging/v1/docs/conf.py | 376 -- owl-bot-staging/v1/docs/index.rst | 7 - .../v1/docs/language_v1/language_service.rst | 6 - .../v1/docs/language_v1/services.rst | 6 - owl-bot-staging/v1/docs/language_v1/types.rst | 6 - .../v1/google/cloud/language/__init__.py | 75 - .../v1/google/cloud/language/gapic_version.py | 16 - .../v1/google/cloud/language/py.typed | 2 - .../v1/google/cloud/language_v1/__init__.py | 76 - .../cloud/language_v1/gapic_metadata.json | 118 - .../google/cloud/language_v1/gapic_version.py | 16 - .../v1/google/cloud/language_v1/py.typed | 2 - .../cloud/language_v1/services/__init__.py | 15 - .../services/language_service/__init__.py | 22 - .../services/language_service/async_client.py | 867 ---- .../services/language_service/client.py | 1020 ----- .../language_service/transports/__init__.py | 38 - .../language_service/transports/base.py | 261 -- .../language_service/transports/grpc.py | 405 -- .../transports/grpc_asyncio.py | 404 -- .../language_service/transports/rest.py | 907 ---- .../cloud/language_v1/types/__init__.py | 68 - .../language_v1/types/language_service.py | 1677 ------- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...language_service_analyze_entities_async.py | 55 - ..._language_service_analyze_entities_sync.py | 55 - ..._service_analyze_entity_sentiment_async.py | 55 - 
...e_service_analyze_entity_sentiment_sync.py | 55 - ...anguage_service_analyze_sentiment_async.py | 55 - ...language_service_analyze_sentiment_sync.py | 55 - ...d_language_service_analyze_syntax_async.py | 55 - ...ed_language_service_analyze_syntax_sync.py | 55 - ...ed_language_service_annotate_text_async.py | 55 - ...ted_language_service_annotate_text_sync.py | 55 - ...ed_language_service_classify_text_async.py | 55 - ...ted_language_service_classify_text_sync.py | 55 - ...pet_metadata_google.cloud.language.v1.json | 1029 ----- .../v1/scripts/fixup_language_v1_keywords.py | 181 - owl-bot-staging/v1/setup.py | 90 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 9 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/language_v1/__init__.py | 16 - .../language_v1/test_language_service.py | 3674 --------------- owl-bot-staging/v1beta2/.coveragerc | 13 - owl-bot-staging/v1beta2/.flake8 | 33 - owl-bot-staging/v1beta2/MANIFEST.in | 2 - owl-bot-staging/v1beta2/README.rst | 49 - owl-bot-staging/v1beta2/docs/conf.py | 376 -- owl-bot-staging/v1beta2/docs/index.rst | 7 - .../language_v1beta2/language_service.rst | 6 - .../docs/language_v1beta2/services.rst | 6 - .../v1beta2/docs/language_v1beta2/types.rst | 6 - .../v1beta2/google/cloud/language/__init__.py | 79 - .../google/cloud/language/gapic_version.py | 16 - .../v1beta2/google/cloud/language/py.typed | 2 - .../google/cloud/language_v1beta2/__init__.py | 80 - .../language_v1beta2/gapic_metadata.json | 133 - .../cloud/language_v1beta2/gapic_version.py | 16 - .../google/cloud/language_v1beta2/py.typed | 2 - .../language_v1beta2/services/__init__.py | 15 - .../services/language_service/__init__.py | 22 - .../services/language_service/async_client.py | 963 ---- .../services/language_service/client.py | 1116 ----- .../language_service/transports/__init__.py | 38 - .../language_service/transports/base.py | 275 -- .../language_service/transports/grpc.py | 432 -- .../transports/grpc_asyncio.py | 431 -- .../language_service/transports/rest.py | 1029 ----- .../cloud/language_v1beta2/types/__init__.py | 72 - .../types/language_service.py | 1761 ------- owl-bot-staging/v1beta2/mypy.ini | 3 - owl-bot-staging/v1beta2/noxfile.py | 184 - ...language_service_analyze_entities_async.py | 55 - ..._language_service_analyze_entities_sync.py | 55 - ..._service_analyze_entity_sentiment_async.py | 55 - ...e_service_analyze_entity_sentiment_sync.py | 55 - ...anguage_service_analyze_sentiment_async.py | 55 - ...language_service_analyze_sentiment_sync.py | 55 - ...d_language_service_analyze_syntax_async.py | 55 - ...ed_language_service_analyze_syntax_sync.py | 55 - ...ed_language_service_annotate_text_async.py | 55 - ...ted_language_service_annotate_text_sync.py | 55 - ...ed_language_service_classify_text_async.py | 55 - ...ted_language_service_classify_text_sync.py | 55 - ...etadata_google.cloud.language.v1beta2.json | 1190 ----- .../fixup_language_v1beta2_keywords.py | 182 - owl-bot-staging/v1beta2/setup.py | 90 - .../v1beta2/testing/constraints-3.10.txt | 6 - .../v1beta2/testing/constraints-3.11.txt | 6 - .../v1beta2/testing/constraints-3.12.txt | 6 - .../v1beta2/testing/constraints-3.7.txt | 9 - .../v1beta2/testing/constraints-3.8.txt | 6 - 
.../v1beta2/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1beta2/tests/__init__.py | 16 - .../v1beta2/tests/unit/__init__.py | 16 - .../v1beta2/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/language_v1beta2/__init__.py | 16 - .../language_v1beta2/test_language_service.py | 4070 ----------------- ...ed_language_service_moderate_text_async.py | 0 ...ted_language_service_moderate_text_sync.py | 0 ...pet_metadata_google.cloud.language.v1.json | 2 +- ...etadata_google.cloud.language.v1beta2.json | 163 +- scripts/fixup_language_v1beta2_keywords.py | 1 + .../language_v1beta2/test_language_service.py | 444 ++ 126 files changed, 1097 insertions(+), 25879 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/language_service.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/types.rst delete mode 100644 owl-bot-staging/v1/google/cloud/language/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/language/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json delete mode 100644 owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py delete mode 100644 owl-bot-staging/v1beta2/.coveragerc delete mode 100644 owl-bot-staging/v1beta2/.flake8 delete mode 100644 owl-bot-staging/v1beta2/MANIFEST.in delete mode 100644 owl-bot-staging/v1beta2/README.rst delete mode 100644 owl-bot-staging/v1beta2/docs/conf.py delete mode 100644 owl-bot-staging/v1beta2/docs/index.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/py.typed delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py delete mode 100644 
owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py delete mode 100644 owl-bot-staging/v1beta2/mypy.ini delete mode 100644 owl-bot-staging/v1beta2/noxfile.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json delete mode 100644 owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py delete mode 100644 owl-bot-staging/v1beta2/setup.py delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1beta2/tests/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py delete mode 
100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py rename {owl-bot-staging/v1beta2/samples => samples}/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py (100%) rename {owl-bot-staging/v1beta2/samples => samples}/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py (100%) diff --git a/google/cloud/language_v1beta2/__init__.py b/google/cloud/language_v1beta2/__init__.py index 459b226d..d8e5de1b 100644 --- a/google/cloud/language_v1beta2/__init__.py +++ b/google/cloud/language_v1beta2/__init__.py @@ -39,6 +39,8 @@ EncodingType, Entity, EntityMention, + ModerateTextRequest, + ModerateTextResponse, PartOfSpeech, Sentence, Sentiment, @@ -68,6 +70,8 @@ "Entity", "EntityMention", "LanguageServiceClient", + "ModerateTextRequest", + "ModerateTextResponse", "PartOfSpeech", "Sentence", "Sentiment", diff --git a/google/cloud/language_v1beta2/gapic_metadata.json b/google/cloud/language_v1beta2/gapic_metadata.json index fca8f442..85a901f9 100644 --- a/google/cloud/language_v1beta2/gapic_metadata.json +++ b/google/cloud/language_v1beta2/gapic_metadata.json @@ -39,6 +39,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } }, @@ -74,6 +79,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } }, @@ -109,6 +119,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } } diff --git a/google/cloud/language_v1beta2/services/language_service/async_client.py b/google/cloud/language_v1beta2/services/language_service/async_client.py index 2676da9f..0ce7f723 100644 --- a/google/cloud/language_v1beta2/services/language_service/async_client.py +++ b/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -793,6 +793,104 @@ async def sample_classify_text(): # Done; return the response. return response + async def moderate_text( + self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]]): + The request object. 
The document moderation request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.ModerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.moderate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def annotate_text( self, request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py index 4c45046d..4888fcfc 100644 --- a/google/cloud/language_v1beta2/services/language_service/client.py +++ b/google/cloud/language_v1beta2/services/language_service/client.py @@ -952,6 +952,104 @@ def sample_classify_text(): # Done; return the response. return response + def moderate_text( + self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]): + The request object. The document moderation request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ModerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ModerateTextRequest): + request = language_service.ModerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.moderate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def annotate_text( self, request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/base.py b/google/cloud/language_v1beta2/services/language_service/transports/base.py index 5204e4c9..06cd03ef 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/base.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/base.py @@ -200,6 +200,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.moderate_text: gapic_v1.method.wrap_method( + self.moderate_text, + default_timeout=None, + client_info=client_info, + ), self.annotate_text: gapic_v1.method.wrap_method( self.annotate_text, default_retry=retries.Retry( @@ -286,6 +291,18 @@ def classify_text( ]: raise NotImplementedError() + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], + Union[ + language_service.ModerateTextResponse, + Awaitable[language_service.ModerateTextResponse], + ], + ]: + raise NotImplementedError() + @property def annotate_text( self, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py index f89362eb..ec629428 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -380,6 +380,35 @@ def classify_text( ) return self._stubs["classify_text"] + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], language_service.ModerateTextResponse + ]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + ~.ModerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "moderate_text" not in self._stubs: + self._stubs["moderate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/ModerateText", + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs["moderate_text"] + @property def annotate_text( self, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py index dc0f8f26..4d1a8065 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -385,6 +385,36 @@ def classify_text( ) return self._stubs["classify_text"] + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], + Awaitable[language_service.ModerateTextResponse], + ]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + Awaitable[~.ModerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "moderate_text" not in self._stubs: + self._stubs["moderate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/ModerateText", + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs["moderate_text"] + @property def annotate_text( self, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/google/cloud/language_v1beta2/services/language_service/transports/rest.py index e0821a77..90a221e3 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/rest.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/rest.py @@ -111,6 +111,14 @@ def post_classify_text(self, response): logging.log(f"Received response: {response}") return response + def pre_moderate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_moderate_text(self, response): + logging.log(f"Received response: {response}") + return response + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) client = LanguageServiceClient(transport=transport) @@ -257,6 +265,29 @@ def post_classify_text( """ return response + def pre_moderate_text( + self, + request: language_service.ModerateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for moderate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_moderate_text( + self, response: language_service.ModerateTextResponse + ) -> language_service.ModerateTextResponse: + """Post-rpc interceptor for moderate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class LanguageServiceRestStub: @@ -945,6 +976,104 @@ def __call__( resp = self._interceptor.post_classify_text(resp) return resp + class _ModerateText(LanguageServiceRestStub): + def __hash__(self): + return hash("ModerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.ModerateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Call the moderate text method over HTTP. + + Args: + request (~.language_service.ModerateTextRequest): + The request object. The document moderation request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ModerateTextResponse: + The document moderation response + message. 
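
The ``pre_moderate_text``/``post_moderate_text`` hooks above are the supported
way to observe or rewrite ModerateText traffic on the REST transport. A sketch
of wiring an interceptor in; the ``LoggingInterceptor`` name is ours, and
constructing the transport still resolves credentials as usual:

.. code-block:: python

    from google.cloud import language_v1beta2
    from google.cloud.language_v1beta2.services.language_service.transports.rest import (
        LanguageServiceRestInterceptor,
        LanguageServiceRestTransport,
    )


    class LoggingInterceptor(LanguageServiceRestInterceptor):
        def pre_moderate_text(self, request, metadata):
            # Runs before the HTTP call; may rewrite the request or metadata.
            print(f"moderating {len(request.document.content)} chars")
            return request, metadata

        def post_moderate_text(self, response):
            # Runs after a successful call, before user code sees the response.
            print(f"{len(response.moderation_categories)} categories returned")
            return response


    transport = LanguageServiceRestTransport(interceptor=LoggingInterceptor())
    client = language_v1beta2.LanguageServiceClient(transport=transport)

As the ``_ModerateText`` stub below shows, the call itself is a plain ``POST``
to ``/v1beta2/documents:moderateText`` with a JSON body, so the hooks wrap an
ordinary HTTP round trip.
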
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:moderateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_moderate_text(request, metadata) + pb_request = language_service.ModerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ModerateTextResponse() + pb_resp = language_service.ModerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_moderate_text(resp) + return resp + @property def analyze_entities( self, @@ -1008,6 +1137,16 @@ def classify_text( # In C++ this would require a dynamic_cast return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], language_service.ModerateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/google/cloud/language_v1beta2/types/__init__.py b/google/cloud/language_v1beta2/types/__init__.py index b4b20c9c..4f98d4d3 100644 --- a/google/cloud/language_v1beta2/types/__init__.py +++ b/google/cloud/language_v1beta2/types/__init__.py @@ -33,6 +33,8 @@ EncodingType, Entity, EntityMention, + ModerateTextRequest, + ModerateTextResponse, PartOfSpeech, Sentence, Sentiment, @@ -59,6 +61,8 @@ "Document", "Entity", "EntityMention", + "ModerateTextRequest", + "ModerateTextResponse", "PartOfSpeech", "Sentence", "Sentiment", diff --git a/google/cloud/language_v1beta2/types/language_service.py b/google/cloud/language_v1beta2/types/language_service.py index 884a1512..d88273fd 100644 --- a/google/cloud/language_v1beta2/types/language_service.py +++ b/google/cloud/language_v1beta2/types/language_service.py @@ -44,6 +44,8 @@ "AnalyzeSyntaxResponse", "ClassifyTextRequest", "ClassifyTextResponse", + "ModerateTextRequest", + "ModerateTextResponse", "AnnotateTextRequest", "AnnotateTextResponse", }, @@ -1227,9 +1229,8 @@ class ClassificationCategory(proto.Message): Attributes: name (str): - The name of the category representing the document, from the - `predefined - taxonomy `__. 
+ The name of the category representing the + document. confidence (float): The classifier's confidence of the category. Number represents how certain the classifier is @@ -1570,6 +1571,39 @@ class ClassifyTextResponse(proto.Message): ) +class ModerateTextRequest(proto.Message): + r"""The document moderation request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + """ + + document: "Document" = proto.Field( + proto.MESSAGE, + number=1, + message="Document", + ) + + +class ModerateTextResponse(proto.Message): + r"""The document moderation response message. + + Attributes: + moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Harmful and sensitive categories representing + the input document. + """ + + moderation_categories: MutableSequence[ + "ClassificationCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ClassificationCategory", + ) + + class AnnotateTextRequest(proto.Message): r"""The request message for the text annotation API, which can perform multiple analysis types (sentiment, entities, and @@ -1588,7 +1622,7 @@ class AnnotateTextRequest(proto.Message): class Features(proto.Message): r"""All available features for sentiment, syntax, and semantic analysis. Setting each one to true will enable that specific - analysis for the input. Next ID: 11 + analysis for the input. Next ID: 12 Attributes: extract_syntax (bool): @@ -1605,6 +1639,9 @@ class Features(proto.Message): the API will use the default model which classifies into a `predefined taxonomy `__. + moderate_text (bool): + Moderate the document for harmful and + sensitive categories. classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): The model options to use for classification. Defaults to v1 options if not specified. Only used if ``classify_text`` is @@ -1631,6 +1668,10 @@ class Features(proto.Message): proto.BOOL, number=6, ) + moderate_text: bool = proto.Field( + proto.BOOL, + number=11, + ) classification_model_options: "ClassificationModelOptions" = proto.Field( proto.MESSAGE, number=10, @@ -1682,6 +1723,9 @@ class AnnotateTextResponse(proto.Message): field for more details. categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): Categories identified in the input document. + moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Harmful and sensitive categories identified + in the input document. 
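
Because moderation also surfaces as a ``Features`` flag (field 11) and a new
``AnnotateTextResponse`` field (field 8), it composes with the other analyses in
a single ``AnnotateText`` call. A sketch under the same credential assumptions;
the field values are illustrative:

.. code-block:: python

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()
    document = language_v1beta2.Document(
        content="content_value",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )
    features = language_v1beta2.AnnotateTextRequest.Features(
        extract_document_sentiment=True,
        moderate_text=True,
    )
    response = client.annotate_text(document=document, features=features)
    # Sentiment and moderation results arrive in one response.
    print(response.document_sentiment.score)
    for category in response.moderation_categories:
        print(category.name, category.confidence)
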
""" sentences: MutableSequence["Sentence"] = proto.RepeatedField( @@ -1713,6 +1757,13 @@ class AnnotateTextResponse(proto.Message): number=6, message="ClassificationCategory", ) + moderation_categories: MutableSequence[ + "ClassificationCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="ClassificationCategory", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index c1f51536..00000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/language/__init__.py - google/cloud/language/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index e0f21a43..00000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/language *.py -recursive-include google/cloud/language_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 0c5f1b6b..00000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Language API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Language API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py
deleted file mode 100644
index 2e1b322d..00000000
--- a/owl-bot-staging/v1/docs/conf.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-language documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-language"
-copyright = u"2022, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-language-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warning, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-language.tex",
-        u"google-cloud-language Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-language", - u"Google Cloud Language Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-language", - u"google-cloud-language Documentation", - author, - "google-cloud-language", - "GAPIC library for Google Cloud Language API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 90928956..00000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - language_v1/services - language_v1/types diff --git a/owl-bot-staging/v1/docs/language_v1/language_service.rst b/owl-bot-staging/v1/docs/language_v1/language_service.rst deleted file mode 100644 index 96e8755a..00000000 --- a/owl-bot-staging/v1/docs/language_v1/language_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -LanguageService ---------------------------------- - -.. 
automodule:: google.cloud.language_v1.services.language_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/language_v1/services.rst b/owl-bot-staging/v1/docs/language_v1/services.rst deleted file mode 100644 index 26f74fe9..00000000 --- a/owl-bot-staging/v1/docs/language_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Language v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - language_service diff --git a/owl-bot-staging/v1/docs/language_v1/types.rst b/owl-bot-staging/v1/docs/language_v1/types.rst deleted file mode 100644 index 5dd3769e..00000000 --- a/owl-bot-staging/v1/docs/language_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Language v1 API -====================================== - -.. automodule:: google.cloud.language_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/language/__init__.py b/owl-bot-staging/v1/google/cloud/language/__init__.py deleted file mode 100644 index ff0761f6..00000000 --- a/owl-bot-staging/v1/google/cloud/language/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.language import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.language_v1.services.language_service.client import LanguageServiceClient -from google.cloud.language_v1.services.language_service.async_client import LanguageServiceAsyncClient - -from google.cloud.language_v1.types.language_service import AnalyzeEntitiesRequest -from google.cloud.language_v1.types.language_service import AnalyzeEntitiesResponse -from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentRequest -from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentResponse -from google.cloud.language_v1.types.language_service import AnalyzeSentimentRequest -from google.cloud.language_v1.types.language_service import AnalyzeSentimentResponse -from google.cloud.language_v1.types.language_service import AnalyzeSyntaxRequest -from google.cloud.language_v1.types.language_service import AnalyzeSyntaxResponse -from google.cloud.language_v1.types.language_service import AnnotateTextRequest -from google.cloud.language_v1.types.language_service import AnnotateTextResponse -from google.cloud.language_v1.types.language_service import ClassificationCategory -from google.cloud.language_v1.types.language_service import ClassificationModelOptions -from google.cloud.language_v1.types.language_service import ClassifyTextRequest -from google.cloud.language_v1.types.language_service import ClassifyTextResponse -from google.cloud.language_v1.types.language_service import DependencyEdge -from google.cloud.language_v1.types.language_service import Document -from google.cloud.language_v1.types.language_service import Entity -from google.cloud.language_v1.types.language_service import EntityMention -from 
google.cloud.language_v1.types.language_service import PartOfSpeech -from google.cloud.language_v1.types.language_service import Sentence -from google.cloud.language_v1.types.language_service import Sentiment -from google.cloud.language_v1.types.language_service import TextSpan -from google.cloud.language_v1.types.language_service import Token -from google.cloud.language_v1.types.language_service import EncodingType - -__all__ = ('LanguageServiceClient', - 'LanguageServiceAsyncClient', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1/google/cloud/language/gapic_version.py b/owl-bot-staging/v1/google/cloud/language/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1/google/cloud/language/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language/py.typed b/owl-bot-staging/v1/google/cloud/language/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1/google/cloud/language/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py deleted file mode 100644 index d2eb7660..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.language_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.language_service import LanguageServiceClient -from .services.language_service import LanguageServiceAsyncClient - -from .types.language_service import AnalyzeEntitiesRequest -from .types.language_service import AnalyzeEntitiesResponse -from .types.language_service import AnalyzeEntitySentimentRequest -from .types.language_service import AnalyzeEntitySentimentResponse -from .types.language_service import AnalyzeSentimentRequest -from .types.language_service import AnalyzeSentimentResponse -from .types.language_service import AnalyzeSyntaxRequest -from .types.language_service import AnalyzeSyntaxResponse -from .types.language_service import AnnotateTextRequest -from .types.language_service import AnnotateTextResponse -from .types.language_service import ClassificationCategory -from .types.language_service import ClassificationModelOptions -from .types.language_service import ClassifyTextRequest -from .types.language_service import ClassifyTextResponse -from .types.language_service import DependencyEdge -from .types.language_service import Document -from .types.language_service import Entity -from .types.language_service import EntityMention -from .types.language_service import PartOfSpeech -from .types.language_service import Sentence -from .types.language_service import Sentiment -from .types.language_service import TextSpan -from .types.language_service import Token -from .types.language_service import EncodingType - -__all__ = ( - 'LanguageServiceAsyncClient', -'AnalyzeEntitiesRequest', -'AnalyzeEntitiesResponse', -'AnalyzeEntitySentimentRequest', -'AnalyzeEntitySentimentResponse', -'AnalyzeSentimentRequest', -'AnalyzeSentimentResponse', -'AnalyzeSyntaxRequest', -'AnalyzeSyntaxResponse', -'AnnotateTextRequest', -'AnnotateTextResponse', -'ClassificationCategory', -'ClassificationModelOptions', -'ClassifyTextRequest', -'ClassifyTextResponse', -'DependencyEdge', -'Document', -'EncodingType', -'Entity', -'EntityMention', -'LanguageServiceClient', -'PartOfSpeech', -'Sentence', -'Sentiment', -'TextSpan', -'Token', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json deleted file mode 100644 index e475aad9..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json +++ /dev/null @@ -1,118 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.language_v1", - "protoPackage": "google.cloud.language.v1", - "schema": "1.0", - "services": { - "LanguageService": { - "clients": { - "grpc": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - } - } - }, - "grpc-async": { - "libraryClient": "LanguageServiceAsyncClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] 
- }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - } - } - }, - "rest": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/py.typed b/owl-bot-staging/v1/google/cloud/language_v1/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py deleted file mode 100644 index 6e5f9052..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import LanguageServiceClient -from .async_client import LanguageServiceAsyncClient - -__all__ = ( - 'LanguageServiceClient', - 'LanguageServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py deleted file mode 100644 index 565636d9..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py +++ /dev/null @@ -1,867 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.language_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.language_v1.types import language_service -from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .client import LanguageServiceClient - - -class LanguageServiceAsyncClient: - """Provides text analysis operations such as sentiment analysis - and entity recognition. 
- """ - - _client: LanguageServiceClient - - DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) - common_project_path = staticmethod(LanguageServiceClient.common_project_path) - parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) - common_location_path = staticmethod(LanguageServiceClient.common_location_path) - parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
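
The classmethods above cover the common ways to construct a client when
Application Default Credentials are not enough. Two sketches; the key path is
hypothetical, and an explicit endpoint simply takes precedence over the mTLS
resolution order described above:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import language_v1

    # Explicit service-account credentials (path is hypothetical).
    client = language_v1.LanguageServiceAsyncClient.from_service_account_file(
        "/path/to/key.json"
    )

    # Or pin the API endpoint; an explicit ``api_endpoint`` wins over the
    # GOOGLE_API_USE_MTLS_ENDPOINT resolution described in the docstring above.
    options = ClientOptions(api_endpoint="language.googleapis.com")
    pinned = language_v1.LanguageServiceClient(client_options=options)
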
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> LanguageServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LanguageServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the language service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.LanguageServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = LanguageServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def analyze_sentiment(self, - request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSentimentResponse: - r"""Analyzes the sentiment of the provided text. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - async def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]]): - The request object. The sentiment analysis request - message. - document (:class:`google.cloud.language_v1.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): - The encoding type used by the API to - calculate sentence offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeSentimentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_sentiment, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
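
The ``wrap_method`` defaults above (exponential backoff on DeadlineExceeded and
ServiceUnavailable, with a 600-second deadline) apply per method, but every
generated method also accepts per-call overrides. A sketch on the sync client
for brevity; the retry and timeout values are illustrative:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="content_value",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )
    # Replace the generated defaults for this one call: retry only on
    # ServiceUnavailable and give up after 30 seconds overall.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=30.0,
    )
    response = client.analyze_sentiment(
        document=document,
        retry=custom_retry,
        timeout=30.0,
    )
    print(response.document_sentiment.score)
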
- return response - - async def analyze_entities(self, - request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - async def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entities(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]]): - The request object. The entity analysis request message. - document (:class:`google.cloud.language_v1.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entities,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_entity_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitySentimentResponse:
-        r"""Finds entities, similar to
-        [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities]
-        in the text and analyzes sentiment associated with each entity
-        and its mentions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_entity_sentiment():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeEntitySentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_entity_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]]):
-                The request object. The entity-level sentiment analysis
-                request message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeEntitySentimentResponse:
-                The entity-level sentiment analysis
-                response message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeEntitySentimentRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entity_sentiment,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_syntax(self,
-            request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSyntaxResponse:
-        r"""Analyzes the syntax of the text and provides sentence
-        boundaries and tokenization along with part of speech
-        tags, dependency trees, and other properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_syntax():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeSyntaxRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_syntax(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]]):
-                The request object. The syntax analysis request message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeSyntaxResponse:
-                The syntax analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeSyntaxRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_syntax,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def classify_text(self,
-            request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.ClassifyTextResponse:
-        r"""Classifies a document into categories.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_classify_text():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.ClassifyTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.classify_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]]):
-                The request object. The document classification request
-                message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.ClassifyTextResponse:
-                The document classification response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
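-        # NOTE (editor's annotation, illustrative only -- not generated code):
-        # `request` may be a dict or a typed message; these calls are equivalent:
-        #   await client.classify_text(request={"document": {"content": "Hi."}})
-        #   await client.classify_text(document=language_v1.Document(content="Hi."))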
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.ClassifyTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.classify_text,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def annotate_text(self,
-            request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            features: Optional[language_service.AnnotateTextRequest.Features] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnnotateTextResponse:
-        r"""A convenience method that provides all the features
-        that analyzeSentiment, analyzeEntities, and
-        analyzeSyntax provide in one call.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_annotate_text():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnnotateTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.annotate_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]]):
-                The request object. The request message for the text
-                annotation API, which can perform
-                multiple analysis types (sentiment,
-                entities, and syntax) in one call.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            features (:class:`google.cloud.language_v1.types.AnnotateTextRequest.Features`):
-                Required. The enabled features.
-                This corresponds to the ``features`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnnotateTextResponse:
-                The text annotations response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, features, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnnotateTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if features is not None:
-            request.features = features
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.annotate_text,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self):
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "LanguageServiceAsyncClient",
-)
diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py
deleted file mode 100644
index 831092ef..00000000
--- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py
+++ /dev/null
@@ -1,1020 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-
-from google.cloud.language_v1 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials             # type: ignore
-from google.auth.transport import mtls                            # type: ignore
-from google.auth.transport.grpc import SslCredentials             # type: ignore
-from google.auth.exceptions import MutualTLSChannelError          # type: ignore
-from google.oauth2 import service_account                         # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object]  # type: ignore
-
-from google.cloud.language_v1.types import language_service
-from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import LanguageServiceGrpcTransport
-from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
-from .transports.rest import LanguageServiceRestTransport
-
-
-class LanguageServiceClientMeta(type):
-    """Metaclass for the LanguageService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
-    _transport_registry["grpc"] = LanguageServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = LanguageServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[LanguageServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
-    """Provides text analysis operations such as sentiment analysis
-    and entity recognition.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
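-            # NOTE (editor's annotation, illustrative only -- not generated code):
-            # e.g. "language.googleapis.com" matches with name="language" and
-            # googledomain=".googleapis.com", and is then converted below to
-            # "language.mtls.googleapis.com".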
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "language.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LanguageServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LanguageServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LanguageServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the language service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, LanguageServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, LanguageServiceTransport): - # transport is a LanguageServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def analyze_sentiment(self, - request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSentimentResponse: - r"""Analyzes the sentiment of the provided text. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]): - The request object. The sentiment analysis request - message. 
- document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeSentimentRequest): - request = language_service.AnalyzeSentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_entities(self, - request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]): - The request object. The entity analysis request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitiesRequest): - request = language_service.AnalyzeEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entities] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def analyze_entity_sentiment(self, - request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]): - The request object. The entity-level sentiment analysis - request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitySentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): - request = language_service.AnalyzeEntitySentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_syntax(self, - request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]): - The request object. The syntax analysis request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSyntaxRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, language_service.AnalyzeSyntaxRequest): - request = language_service.AnalyzeSyntaxRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def classify_text(self, - request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ClassifyTextResponse: - r"""Classifies a document into categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]): - The request object. The document classification request - message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.ClassifyTextResponse: - The document classification response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ClassifyTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.ClassifyTextRequest): - request = language_service.ClassifyTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.classify_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def annotate_text(self, - request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - features: Optional[language_service.AnnotateTextRequest.Features] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnnotateTextResponse: - r"""A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - features (google.cloud.language_v1.types.AnnotateTextRequest.Features): - Required. The enabled features. - This corresponds to the ``features`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnnotateTextResponse: - The text annotations response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
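-        # NOTE (editor's annotation, illustrative only -- not generated code):
-        # e.g. this mixed call is rejected with ValueError:
-        #   client.annotate_text(request=request, document=document)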
- has_flattened_params = any([document, features, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnnotateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnnotateTextRequest): - request = language_service.AnnotateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if features is not None: - request.features = features - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.annotate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "LanguageServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "LanguageServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py deleted file mode 100644 index 3cb6ab92..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LanguageServiceTransport -from .grpc import LanguageServiceGrpcTransport -from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .rest import LanguageServiceRestTransport -from .rest import LanguageServiceRestInterceptor - - -# Compile a registry of transports. 
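-# NOTE (editor's annotation, illustrative only -- not generated code): the
-# labels below are the values accepted by the clients' `transport` argument,
-# e.g. LanguageServiceClient(transport="rest").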
-_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] -_transport_registry['grpc'] = LanguageServiceGrpcTransport -_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport -_transport_registry['rest'] = LanguageServiceRestTransport - -__all__ = ( - 'LanguageServiceTransport', - 'LanguageServiceGrpcTransport', - 'LanguageServiceGrpcAsyncIOTransport', - 'LanguageServiceRestTransport', - 'LanguageServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py deleted file mode 100644 index 99429175..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.language_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.language_v1.types import language_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class LanguageServiceTransport(abc.ABC): - """Abstract transport class for LanguageService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'language.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
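# (The table below pairs each RPC with gapic_v1.method.wrap_method, which
# layers the default retry and timeout onto the raw transport callable; a
# distilled sketch of one wrapping, placeholder callable name, retry predicate
# omitted for brevity:
#
#     wrapped = gapic_v1.method.wrap_method(
#         raw_rpc_callable,
#         default_retry=retries.Retry(initial=0.1, maximum=60.0, multiplier=1.3),
#         default_timeout=600.0,
#         client_info=client_info,
#     )
#
# Every entry in self._wrapped_methods follows this shape.)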
- self._wrapped_methods = { - self.analyze_sentiment: gapic_v1.method.wrap_method( - self.analyze_sentiment, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.analyze_entities: gapic_v1.method.wrap_method( - self.analyze_entities, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.analyze_entity_sentiment: gapic_v1.method.wrap_method( - self.analyze_entity_sentiment, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.analyze_syntax: gapic_v1.method.wrap_method( - self.analyze_syntax, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.classify_text: gapic_v1.method.wrap_method( - self.classify_text, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.annotate_text: gapic_v1.method.wrap_method( - self.annotate_text, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
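# (Context for the retry policy used throughout the table above: every RPC
# retries only DeadlineExceeded and ServiceUnavailable, with exponential
# backoff starting at 0.1 s, multiplied by 1.3 per attempt, each sleep capped
# at 60 s, under a 600 s overall deadline. A sketch of the nominal pre-jitter
# sleep ceilings:
#
#     delay, schedule = 0.1, []
#     for _ in range(5):
#         schedule.append(round(delay, 3))
#         delay = min(delay * 1.3, 60.0)
#     # schedule == [0.1, 0.13, 0.169, 0.22, 0.286]
#
# google.api_core.retry.Retry applies jitter, so actual sleeps vary.)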
- """ - raise NotImplementedError() - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Union[ - language_service.AnalyzeSentimentResponse, - Awaitable[language_service.AnalyzeSentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Union[ - language_service.AnalyzeEntitiesResponse, - Awaitable[language_service.AnalyzeEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Union[ - language_service.AnalyzeEntitySentimentResponse, - Awaitable[language_service.AnalyzeEntitySentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Union[ - language_service.AnalyzeSyntaxResponse, - Awaitable[language_service.AnalyzeSyntaxResponse] - ]]: - raise NotImplementedError() - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Union[ - language_service.ClassifyTextResponse, - Awaitable[language_service.ClassifyTextResponse] - ]]: - raise NotImplementedError() - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Union[ - language_service.AnnotateTextResponse, - Awaitable[language_service.AnnotateTextResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'LanguageServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py deleted file mode 100644 index 49ab72e3..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py +++ /dev/null @@ -1,405 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.language_v1.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO - - -class LanguageServiceGrpcTransport(LanguageServiceTransport): - """gRPC backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
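# (Usage sketch for the constructor documented above, with an illustrative
# emulator endpoint; the client import is assumed from google.cloud.language_v1.
# Passing a ready-made channel makes the transport ignore credentials
# entirely, since the channel already carries its own:
#
#     channel = grpc.insecure_channel("localhost:8080")
#     transport = LanguageServiceGrpcTransport(channel=channel)
#     client = LanguageServiceClient(transport=transport)
# )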
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - ~.AnalyzeSentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - ~.AnalyzeEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - ~.AnalyzeEntitySentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - ~.AnalyzeSyntaxResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - ~.ClassifyTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - Returns: - Callable[[~.AnnotateTextRequest], - ~.AnnotateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'LanguageServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py deleted file mode 100644 index a95493e5..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,404 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.language_v1.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import LanguageServiceGrpcTransport - - -class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): - """gRPC AsyncIO backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
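# (End-to-end usage sketch for this AsyncIO transport via the async client;
# assumes application default credentials are available:
#
#     import asyncio
#     from google.cloud import language_v1
#
#     async def main():
#         client = language_v1.LanguageServiceAsyncClient()
#         document = {"content": "Hello, world!",
#                     "type_": language_v1.Document.Type.PLAIN_TEXT}
#         response = await client.analyze_sentiment(document=document)
#         print(response.document_sentiment.score)
#
#     asyncio.run(main())
# )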
- scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Awaitable[language_service.AnalyzeSentimentResponse]]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - Awaitable[~.AnalyzeSentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Awaitable[language_service.AnalyzeEntitiesResponse]]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - Awaitable[~.AnalyzeEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Awaitable[language_service.AnalyzeEntitySentimentResponse]]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - Awaitable[~.AnalyzeEntitySentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Awaitable[language_service.AnalyzeSyntaxResponse]]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - Awaitable[~.AnalyzeSyntaxResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Awaitable[language_service.ClassifyTextResponse]]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - Awaitable[~.ClassifyTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Awaitable[language_service.AnnotateTextResponse]]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - Returns: - Callable[[~.AnnotateTextRequest], - Awaitable[~.AnnotateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'LanguageServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py deleted file mode 100644 index d4d50a2e..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py +++ /dev/null @@ -1,907 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.language_v1.types import language_service - -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LanguageServiceRestInterceptor: - """Interceptor for LanguageService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LanguageServiceRestTransport. - - .. 
code-block:: python - class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): - def pre_analyze_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_entity_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entity_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_syntax(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_syntax(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_annotate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_annotate_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_classify_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_classify_text(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) - client = LanguageServiceClient(transport=transport) - - - """ - def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: - """Post-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: - """Post-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. 
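# (A concrete variant of the interceptor pattern documented above: stamping
# extra request metadata onto one RPC. The header name is illustrative, and
# credentials are assumed to come from the environment:
#
#     class MetadataStampingInterceptor(LanguageServiceRestInterceptor):
#         def pre_analyze_entities(self, request, metadata):
#             metadata = tuple(metadata) + (("x-example-header", "demo"),)
#             return request, metadata
#
#     transport = LanguageServiceRestTransport(
#         interceptor=MetadataStampingInterceptor())
#     client = LanguageServiceClient(transport=transport)
# )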
- """ - return response - def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: - """Post-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: - """Post-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for annotate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: - """Post-rpc interceptor for annotate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for classify_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: - """Post-rpc interceptor for classify_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LanguageServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LanguageServiceRestInterceptor - - -class LanguageServiceRestTransport(LanguageServiceTransport): - """REST backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LanguageServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
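# (Usage sketch for the url_scheme parameter documented above, pointing the
# REST transport at a plain-HTTP test server; the host value and the use of
# anonymous credentials are illustrative:
#
#     from google.auth.credentials import AnonymousCredentials
#
#     transport = LanguageServiceRestTransport(
#         host="localhost:8080",
#         url_scheme="http",
#         credentials=AnonymousCredentials(),
#     )
# )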
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or LanguageServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _AnalyzeEntities(LanguageServiceRestStub):
-        def __hash__(self):
-            return hash("AnalyzeEntities")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: language_service.AnalyzeEntitiesRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> language_service.AnalyzeEntitiesResponse:
-            r"""Call the analyze entities method over HTTP.
-
-            Args:
-                request (~.language_service.AnalyzeEntitiesRequest):
-                    The request object. The entity analysis request message.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.language_service.AnalyzeEntitiesResponse:
-                    The entity analysis response message.
-            """
-
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/documents:analyzeEntities',
-                'body': '*',
-            },
-            ]
-            request, metadata = self._interceptor.pre_analyze_entities(request, metadata)
-            pb_request = language_service.AnalyzeEntitiesRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True
-            )
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-
-            # Jsonify the query params
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True,
-            ))
-            query_params.update(self._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
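-
# (For the http_options above, transcoding maps the whole request message to
# the POST body of /v1/documents:analyzeEntities; with 'body': '*' nothing is
# left over for query parameters. A sketch, document text illustrative:
#
#     request = language_service.AnalyzeEntitiesRequest(
#         document=language_service.Document(
#             content="Jimi Hendrix played guitar.",
#             type_=language_service.Document.Type.PLAIN_TEXT,
#         )
#     )
#     transcoded = path_template.transcode(
#         http_options, language_service.AnalyzeEntitiesRequest.pb(request))
#     assert transcoded["uri"] == "/v1/documents:analyzeEntities"
#     assert transcoded["method"] == "post"
# )
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.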
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitiesResponse() - pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entities(resp) - return resp - - class _AnalyzeEntitySentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntitySentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitySentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Call the analyze entity sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitySentimentRequest): - The request object. The entity-level sentiment analysis - request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeEntitySentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) - pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitySentimentResponse() - pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entity_sentiment(resp) - return resp - - class _AnalyzeSentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSentimentResponse: - r"""Call the analyze sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeSentimentRequest): - The request object. The sentiment analysis request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeSentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) - pb_request = language_service.AnalyzeSentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSentimentResponse() - pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_sentiment(resp) - return resp - - class _AnalyzeSyntax(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSyntax") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSyntaxRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Call the analyze syntax method over HTTP. - - Args: - request (~.language_service.AnalyzeSyntaxRequest): - The request object. The syntax analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeSyntax', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) - pb_request = language_service.AnalyzeSyntaxRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
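# ignore_unknown_fields=True in the json_format.Parse call below keeps
# this stub forward-compatible: response fields added to the service
# after this client was generated are silently dropped rather than
# raising a ParseError.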
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSyntaxResponse() - pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_syntax(resp) - return resp - - class _AnnotateText(LanguageServiceRestStub): - def __hash__(self): - return hash("AnnotateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnnotateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnnotateTextResponse: - r"""Call the annotate text method over HTTP. - - Args: - request (~.language_service.AnnotateTextRequest): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnnotateTextResponse: - The text annotations response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:annotateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_annotate_text(request, metadata) - pb_request = language_service.AnnotateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
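# After parsing, the post_annotate_text hook below gives a custom
# LanguageServiceRestInterceptor a chance to inspect or replace the
# response before it is returned to the caller.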
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnnotateTextResponse() - pb_resp = language_service.AnnotateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_annotate_text(resp) - return resp - - class _ClassifyText(LanguageServiceRestStub): - def __hash__(self): - return hash("ClassifyText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ClassifyTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ClassifyTextResponse: - r"""Call the classify text method over HTTP. - - Args: - request (~.language_service.ClassifyTextRequest): - The request object. The document classification request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ClassifyTextResponse: - The document classification response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:classifyText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_classify_text(request, metadata) - pb_request = language_service.ClassifyTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ClassifyTextResponse() - pb_resp = language_service.ClassifyTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_classify_text(resp) - return resp - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LanguageServiceRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py deleted file mode 100644 index 1b87226f..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
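For orientation, a minimal sketch of exercising the REST transport defined above from the generated client (hedged: the sample text is illustrative, and transport="rest" is the standard GAPIC option for selecting LanguageServiceRestTransport over the default gRPC transport):

from google.cloud import language_v1

# Route calls over HTTP/JSON through LanguageServiceRestTransport.
client = language_v1.LanguageServiceClient(transport="rest")

document = language_v1.Document(
    content="The quick brown fox jumped over the lazy dog.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)

# Flattened-parameter form; request=AnalyzeEntitiesRequest(...) works too.
response = client.analyze_entities(document=document)
print(response.entities)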
-# -from .language_service import ( - AnalyzeEntitiesRequest, - AnalyzeEntitiesResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeSyntaxRequest, - AnalyzeSyntaxResponse, - AnnotateTextRequest, - AnnotateTextResponse, - ClassificationCategory, - ClassificationModelOptions, - ClassifyTextRequest, - ClassifyTextResponse, - DependencyEdge, - Document, - Entity, - EntityMention, - PartOfSpeech, - Sentence, - Sentiment, - TextSpan, - Token, - EncodingType, -) - -__all__ = ( - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py deleted file mode 100644 index 69ce0bc3..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py +++ /dev/null @@ -1,1677 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.language.v1', - manifest={ - 'EncodingType', - 'Document', - 'Sentence', - 'Entity', - 'Token', - 'Sentiment', - 'PartOfSpeech', - 'DependencyEdge', - 'EntityMention', - 'TextSpan', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - }, -) - - -class EncodingType(proto.Enum): - r"""Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens - and mentions, and languages that natively use different text - encodings may access offsets differently. - - Values: - NONE (0): - If ``EncodingType`` is not specified, encoding-dependent - information (such as ``begin_offset``) will be set at - ``-1``. - UTF8 (1): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-8 encoding of the input. C++ and - Go are examples of languages that use this encoding - natively. 
- UTF16 (2): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-16 encoding of the input. Java - and JavaScript are examples of languages that use this - encoding natively. - UTF32 (3): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-32 encoding of the input. Python - is an example of a language that uses this encoding - natively. - """ - NONE = 0 - UTF8 = 1 - UTF16 = 2 - UTF32 = 3 - - -class Document(proto.Message): - r"""Represents the input to API methods. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.language_v1.types.Document.Type): - Required. If the type is not set or is ``TYPE_UNSPECIFIED``, - returns an ``INVALID_ARGUMENT`` error. - content (str): - The content of the input in string format. - Cloud audit logging exempt since it is based on - user data. - - This field is a member of `oneof`_ ``source``. - gcs_content_uri (str): - The Google Cloud Storage URI where the file content is - located. This URI must be of the form: - gs://bucket_name/object_name. For more details, see - https://cloud.google.com/storage/docs/reference-uris. NOTE: - Cloud Storage object versioning is not supported. - - This field is a member of `oneof`_ ``source``. - language (str): - The language of the document (if not specified, the language - is automatically detected). Both ISO and BCP-47 language - codes are accepted. `Language - Support <https://cloud.google.com/natural-language/docs/languages>`__ - lists currently supported languages for each API method. If - the language (either specified by the caller or - automatically detected) is not supported by the called API - method, an ``INVALID_ARGUMENT`` error is returned. - """ - class Type(proto.Enum): - r"""The document types enum. - - Values: - TYPE_UNSPECIFIED (0): - The content type is not specified. - PLAIN_TEXT (1): - Plain text - HTML (2): - HTML - """ - TYPE_UNSPECIFIED = 0 - PLAIN_TEXT = 1 - HTML = 2 - - type_: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - content: str = proto.Field( - proto.STRING, - number=2, - oneof='source', - ) - gcs_content_uri: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - language: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Sentence(proto.Message): - r"""Represents a sentence in the input document. - - Attributes: - text (google.cloud.language_v1.types.TextSpan): - The sentence text. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeSentiment][] or if - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment] - is set to true, this field will contain the sentiment for - the sentence. - """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=2, - message='Sentiment', - ) - - -class Entity(proto.Message): - r"""Represents a phrase in the text that is a known entity, such - as a person, an organization, or location. The API associates - information, such as salience and mentions, with entities. - - Attributes: - name (str): - The representative name for the entity.
- type_ (google.cloud.language_v1.types.Entity.Type): - The entity type. - metadata (MutableMapping[str, str]): - Metadata associated with the entity. - - For most entity types, the metadata is a Wikipedia URL - (``wikipedia_url``) and Knowledge Graph MID (``mid``), if - they are available. For the metadata associated with other - entity types, see the Type table below. - salience (float): - The salience score associated with the entity in the [0, - 1.0] range. - - The salience score for an entity provides information about - the importance or centrality of that entity to the entire - document text. Scores closer to 0 are less salient, while - scores closer to 1.0 are highly salient. - mentions (MutableSequence[google.cloud.language_v1.types.EntityMention]): - The mentions of this entity in the input - document. The API currently supports proper noun - mentions. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the aggregate - sentiment expressed for this entity in the provided - document. - """ - class Type(proto.Enum): - r"""The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph - MID (``mid``). The table below lists the associated fields for - entities that have different metadata. - - Values: - UNKNOWN (0): - Unknown - PERSON (1): - Person - LOCATION (2): - Location - ORGANIZATION (3): - Organization - EVENT (4): - Event - WORK_OF_ART (5): - Artwork - CONSUMER_GOOD (6): - Consumer product - OTHER (7): - Other types of entities - PHONE_NUMBER (9): - Phone number - - The metadata lists the phone number, formatted according to - local convention, plus whichever additional elements appear - in the text: - - - ``number`` - the actual number, broken down into sections - as per local convention - - ``national_prefix`` - country code, if detected - - ``area_code`` - region or area code, if detected - - ``extension`` - phone extension (to be dialed after - connection), if detected - ADDRESS (10): - Address - - The metadata identifies the street number and locality plus - whichever additional elements appear in the text: - - - ``street_number`` - street number - - ``locality`` - city or town - - ``street_name`` - street/route name, if detected - - ``postal_code`` - postal code, if detected - - ``country`` - country, if detected - - ``broad_region`` - administrative area, such as the - state, if detected - - ``narrow_region`` - smaller administrative area, such as - county, if detected - - ``sublocality`` - used in Asian addresses to demark a - district within a city, if detected - DATE (11): - Date - - The metadata identifies the components of the date: - - - ``year`` - four digit year, if detected - - ``month`` - two digit month number, if detected - - ``day`` - two digit day number, if detected - NUMBER (12): - Number - The metadata is the number itself. - PRICE (13): - Price - - The metadata identifies the ``value`` and ``currency``.
- """ - UNKNOWN = 0 - PERSON = 1 - LOCATION = 2 - ORGANIZATION = 3 - EVENT = 4 - WORK_OF_ART = 5 - CONSUMER_GOOD = 6 - OTHER = 7 - PHONE_NUMBER = 9 - ADDRESS = 10 - DATE = 11 - NUMBER = 12 - PRICE = 13 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - salience: float = proto.Field( - proto.FLOAT, - number=4, - ) - mentions: MutableSequence['EntityMention'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='EntityMention', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=6, - message='Sentiment', - ) - - -class Token(proto.Message): - r"""Represents the smallest syntactic building block of the text. - - Attributes: - text (google.cloud.language_v1.types.TextSpan): - The token text. - part_of_speech (google.cloud.language_v1.types.PartOfSpeech): - Parts of speech tag for this token. - dependency_edge (google.cloud.language_v1.types.DependencyEdge): - Dependency tree parse for this token. - lemma (str): - `Lemma `__ - of the token. - """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - part_of_speech: 'PartOfSpeech' = proto.Field( - proto.MESSAGE, - number=2, - message='PartOfSpeech', - ) - dependency_edge: 'DependencyEdge' = proto.Field( - proto.MESSAGE, - number=3, - message='DependencyEdge', - ) - lemma: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Sentiment(proto.Message): - r"""Represents the feeling associated with the entire text or - entities in the text. - - Attributes: - magnitude (float): - A non-negative number in the [0, +inf) range, which - represents the absolute magnitude of sentiment regardless of - score (positive or negative). - score (float): - Sentiment score between -1.0 (negative - sentiment) and 1.0 (positive sentiment). - """ - - magnitude: float = proto.Field( - proto.FLOAT, - number=2, - ) - score: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class PartOfSpeech(proto.Message): - r"""Represents part of speech information for a token. Parts of speech - are as defined in - http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf - - Attributes: - tag (google.cloud.language_v1.types.PartOfSpeech.Tag): - The part of speech tag. - aspect (google.cloud.language_v1.types.PartOfSpeech.Aspect): - The grammatical aspect. - case (google.cloud.language_v1.types.PartOfSpeech.Case): - The grammatical case. - form (google.cloud.language_v1.types.PartOfSpeech.Form): - The grammatical form. - gender (google.cloud.language_v1.types.PartOfSpeech.Gender): - The grammatical gender. - mood (google.cloud.language_v1.types.PartOfSpeech.Mood): - The grammatical mood. - number (google.cloud.language_v1.types.PartOfSpeech.Number): - The grammatical number. - person (google.cloud.language_v1.types.PartOfSpeech.Person): - The grammatical person. - proper (google.cloud.language_v1.types.PartOfSpeech.Proper): - The grammatical properness. - reciprocity (google.cloud.language_v1.types.PartOfSpeech.Reciprocity): - The grammatical reciprocity. - tense (google.cloud.language_v1.types.PartOfSpeech.Tense): - The grammatical tense. - voice (google.cloud.language_v1.types.PartOfSpeech.Voice): - The grammatical voice. - """ - class Tag(proto.Enum): - r"""The part of speech tags enum. 
- - Values: - UNKNOWN (0): - Unknown - ADJ (1): - Adjective - ADP (2): - Adposition (preposition and postposition) - ADV (3): - Adverb - CONJ (4): - Conjunction - DET (5): - Determiner - NOUN (6): - Noun (common and proper) - NUM (7): - Cardinal number - PRON (8): - Pronoun - PRT (9): - Particle or other function word - PUNCT (10): - Punctuation - VERB (11): - Verb (all tenses and modes) - X (12): - Other: foreign words, typos, abbreviations - AFFIX (13): - Affix - """ - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Aspect(proto.Enum): - r"""The characteristic of a verb that expresses time flow during - an event. - - Values: - ASPECT_UNKNOWN (0): - Aspect is not applicable in the analyzed - language or is not predicted. - PERFECTIVE (1): - Perfective - IMPERFECTIVE (2): - Imperfective - PROGRESSIVE (3): - Progressive - """ - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(proto.Enum): - r"""The grammatical function performed by a noun or pronoun in a - phrase, clause, or sentence. In some languages, other parts of - speech, such as adjective and determiner, take case inflection - in agreement with the noun. - - Values: - CASE_UNKNOWN (0): - Case is not applicable in the analyzed - language or is not predicted. - ACCUSATIVE (1): - Accusative - ADVERBIAL (2): - Adverbial - COMPLEMENTIVE (3): - Complementive - DATIVE (4): - Dative - GENITIVE (5): - Genitive - INSTRUMENTAL (6): - Instrumental - LOCATIVE (7): - Locative - NOMINATIVE (8): - Nominative - OBLIQUE (9): - Oblique - PARTITIVE (10): - Partitive - PREPOSITIONAL (11): - Prepositional - REFLEXIVE_CASE (12): - Reflexive - RELATIVE_CASE (13): - Relative - VOCATIVE (14): - Vocative - """ - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(proto.Enum): - r"""Depending on the language, Form can categorize different - forms of verbs, adjectives, adverbs, etc. For example, it can - categorize inflected endings of verbs and adjectives or - distinguish between short and long forms of adjectives and - participles. - - Values: - FORM_UNKNOWN (0): - Form is not applicable in the analyzed - language or is not predicted. - ADNOMIAL (1): - Adnomial - AUXILIARY (2): - Auxiliary - COMPLEMENTIZER (3): - Complementizer - FINAL_ENDING (4): - Final ending - GERUND (5): - Gerund - REALIS (6): - Realis - IRREALIS (7): - Irrealis - SHORT (8): - Short form - LONG (9): - Long form - ORDER (10): - Order form - SPECIFIC (11): - Specific form - """ - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(proto.Enum): - r"""Gender classes of nouns reflected in the behaviour of - associated words. - - Values: - GENDER_UNKNOWN (0): - Gender is not applicable in the analyzed - language or is not predicted. - FEMININE (1): - Feminine - MASCULINE (2): - Masculine - NEUTER (3): - Neuter - """ - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(proto.Enum): - r"""The grammatical feature of verbs, used for showing modality - and attitude.
- - Values: - MOOD_UNKNOWN (0): - Mood is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_MOOD (1): - Conditional - IMPERATIVE (2): - Imperative - INDICATIVE (3): - Indicative - INTERROGATIVE (4): - Interrogative - JUSSIVE (5): - Jussive - SUBJUNCTIVE (6): - Subjunctive - """ - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(proto.Enum): - r"""Count distinctions. - - Values: - NUMBER_UNKNOWN (0): - Number is not applicable in the analyzed - language or is not predicted. - SINGULAR (1): - Singular - PLURAL (2): - Plural - DUAL (3): - Dual - """ - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(proto.Enum): - r"""The distinction between the speaker, second person, third - person, etc. - - Values: - PERSON_UNKNOWN (0): - Person is not applicable in the analyzed - language or is not predicted. - FIRST (1): - First - SECOND (2): - Second - THIRD (3): - Third - REFLEXIVE_PERSON (4): - Reflexive - """ - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(proto.Enum): - r"""This category shows if the token is part of a proper name. - - Values: - PROPER_UNKNOWN (0): - Proper is not applicable in the analyzed - language or is not predicted. - PROPER (1): - Proper - NOT_PROPER (2): - Not proper - """ - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(proto.Enum): - r"""Reciprocal features of a pronoun. - - Values: - RECIPROCITY_UNKNOWN (0): - Reciprocity is not applicable in the analyzed - language or is not predicted. - RECIPROCAL (1): - Reciprocal - NON_RECIPROCAL (2): - Non-reciprocal - """ - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tense(proto.Enum): - r"""Time reference. - - Values: - TENSE_UNKNOWN (0): - Tense is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_TENSE (1): - Conditional - FUTURE (2): - Future - PAST (3): - Past - PRESENT (4): - Present - IMPERFECT (5): - Imperfect - PLUPERFECT (6): - Pluperfect - """ - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(proto.Enum): - r"""The relationship between the action that a verb expresses and - the participants identified by its arguments. - - Values: - VOICE_UNKNOWN (0): - Voice is not applicable in the analyzed - language or is not predicted. 
- ACTIVE (1): - Active - CAUSATIVE (2): - Causative - PASSIVE (3): - Passive - """ - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 - - tag: Tag = proto.Field( - proto.ENUM, - number=1, - enum=Tag, - ) - aspect: Aspect = proto.Field( - proto.ENUM, - number=2, - enum=Aspect, - ) - case: Case = proto.Field( - proto.ENUM, - number=3, - enum=Case, - ) - form: Form = proto.Field( - proto.ENUM, - number=4, - enum=Form, - ) - gender: Gender = proto.Field( - proto.ENUM, - number=5, - enum=Gender, - ) - mood: Mood = proto.Field( - proto.ENUM, - number=6, - enum=Mood, - ) - number: Number = proto.Field( - proto.ENUM, - number=7, - enum=Number, - ) - person: Person = proto.Field( - proto.ENUM, - number=8, - enum=Person, - ) - proper: Proper = proto.Field( - proto.ENUM, - number=9, - enum=Proper, - ) - reciprocity: Reciprocity = proto.Field( - proto.ENUM, - number=10, - enum=Reciprocity, - ) - tense: Tense = proto.Field( - proto.ENUM, - number=11, - enum=Tense, - ) - voice: Voice = proto.Field( - proto.ENUM, - number=12, - enum=Voice, - ) - - -class DependencyEdge(proto.Message): - r"""Represents dependency parse tree information for a token. - For more information on dependency labels, see - http://www.aclweb.org/anthology/P13-2017 - - Attributes: - head_token_index (int): - Represents the head of this token in the dependency tree. - This is the index of the token which has an arc going to - this token. The index is the position of the token in the - array of tokens returned by the API method. If this token is - a root token, then the ``head_token_index`` is its own - index. - label (google.cloud.language_v1.types.DependencyEdge.Label): - The parse label for the token. - """ - class Label(proto.Enum): - r"""The parse label enum for the token. - - Values: - UNKNOWN (0): - Unknown - ABBREV (1): - Abbreviation modifier - ACOMP (2): - Adjectival complement - ADVCL (3): - Adverbial clause modifier - ADVMOD (4): - Adverbial modifier - AMOD (5): - Adjectival modifier of an NP - APPOS (6): - Appositional modifier of an NP - ATTR (7): - Attribute dependent of a copular verb - AUX (8): - Auxiliary (non-main) verb - AUXPASS (9): - Passive auxiliary - CC (10): - Coordinating conjunction - CCOMP (11): - Clausal complement of a verb or adjective - CONJ (12): - Conjunct - CSUBJ (13): - Clausal subject - CSUBJPASS (14): - Clausal passive subject - DEP (15): - Dependency (unable to determine) - DET (16): - Determiner - DISCOURSE (17): - Discourse - DOBJ (18): - Direct object - EXPL (19): - Expletive - GOESWITH (20): - Goes with (part of a word in a text not well - edited) - IOBJ (21): - Indirect object - MARK (22): - Marker (word introducing a subordinate - clause) - MWE (23): - Multi-word expression - MWV (24): - Multi-word verbal expression - NEG (25): - Negation modifier - NN (26): - Noun compound modifier - NPADVMOD (27): - Noun phrase used as an adverbial modifier - NSUBJ (28): - Nominal subject - NSUBJPASS (29): - Passive nominal subject - NUM (30): - Numeric modifier of a noun - NUMBER (31): - Element of compound number - P (32): - Punctuation mark - PARATAXIS (33): - Parataxis relation - PARTMOD (34): - Participial modifier - PCOMP (35): - The complement of a preposition is a clause - POBJ (36): - Object of a preposition - POSS (37): - Possession modifier - POSTNEG (38): - Postverbal negative particle - PRECOMP (39): - Predicate complement - PRECONJ (40): - Preconjunct - PREDET (41): - Predeterminer - PREF (42): - Prefix - PREP (43): - Prepositional modifier - PRONL (44): - The relationship
between a verb and verbal - morpheme - PRT (45): - Particle - PS (46): - Associative or possessive marker - QUANTMOD (47): - Quantifier phrase modifier - RCMOD (48): - Relative clause modifier - RCMODREL (49): - Complementizer in relative clause - RDROP (50): - Ellipsis without a preceding predicate - REF (51): - Referent - REMNANT (52): - Remnant - REPARANDUM (53): - Reparandum - ROOT (54): - Root - SNUM (55): - Suffix specifying a unit of number - SUFF (56): - Suffix - TMOD (57): - Temporal modifier - TOPIC (58): - Topic marker - VMOD (59): - Clause headed by an infinite form of the verb - that modifies a noun - VOCATIVE (60): - Vocative - XCOMP (61): - Open clausal complement - SUFFIX (62): - Name suffix - TITLE (63): - Name title - ADVPHMOD (64): - Adverbial phrase modifier - AUXCAUS (65): - Causative auxiliary - AUXVV (66): - Helper auxiliary - DTMOD (67): - Rentaishi (Prenominal modifier) - FOREIGN (68): - Foreign words - KW (69): - Keyword - LIST (70): - List for chains of comparable items - NOMC (71): - Nominalized clause - NOMCSUBJ (72): - Nominalized clausal subject - NOMCSUBJPASS (73): - Nominalized clausal passive - NUMC (74): - Compound of numeric modifier - COP (75): - Copula - DISLOCATED (76): - Dislocated relation (for fronted/topicalized - elements) - ASP (77): - Aspect marker - GMOD (78): - Genitive modifier - GOBJ (79): - Genitive object - INFMOD (80): - Infinitival modifier - MES (81): - Measure - NCOMP (82): - Nominal complement of a noun - """ - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - head_token_index: int = proto.Field( - proto.INT32, - number=1, - ) - label: Label = proto.Field( - proto.ENUM, - number=2, - enum=Label, - ) - - -class EntityMention(proto.Message): - r"""Represents a mention for an entity in the text. Currently, - proper noun mentions are supported. - - Attributes: - text (google.cloud.language_v1.types.TextSpan): - The mention text. - type_ (google.cloud.language_v1.types.EntityMention.Type): - The type of the entity mention. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the sentiment - expressed for this mention of the entity in the provided - document. - """ - class Type(proto.Enum): - r"""The supported types of mentions. 
- - Values: - TYPE_UNKNOWN (0): - Unknown - PROPER (1): - Proper name - COMMON (2): - Common noun (or noun compound) - """ - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=3, - message='Sentiment', - ) - - -class TextSpan(proto.Message): - r"""Represents an output piece of text. - - Attributes: - content (str): - The content of the output text. - begin_offset (int): - The API calculates the beginning offset of the content in - the original document according to the - [EncodingType][google.cloud.language.v1.EncodingType] - specified in the API request. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - begin_offset: int = proto.Field( - proto.INT32, - number=2, - ) - - -class ClassificationCategory(proto.Message): - r"""Represents a category returned from the text classifier. - - Attributes: - name (str): - The name of the category representing the document, from the - `predefined - taxonomy <https://cloud.google.com/natural-language/docs/categories>`__. - confidence (float): - The classifier's confidence of the category. - Number represents how certain the classifier is - that this category represents the given text. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - confidence: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class ClassificationModelOptions(proto.Message): - r"""Model options available for classification requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - v1_model (google.cloud.language_v1.types.ClassificationModelOptions.V1Model): - Setting this field will use the V1 model and - V1 content categories version. The V1 model is a - legacy model; support for this will be - discontinued in the future. - - This field is a member of `oneof`_ ``model_type``. - v2_model (google.cloud.language_v1.types.ClassificationModelOptions.V2Model): - Setting this field will use the V2 model with - the appropriate content categories version. The - V2 model is a better performing model. - - This field is a member of `oneof`_ ``model_type``. - """ - - class V1Model(proto.Message): - r"""Options for the V1 model. - """ - - class V2Model(proto.Message): - r"""Options for the V2 model. - - Attributes: - content_categories_version (google.cloud.language_v1.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): - The content categories used for - classification. - """ - class ContentCategoriesVersion(proto.Enum): - r"""The content categories used for classification. - - Values: - CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): - If ``ContentCategoriesVersion`` is not specified, this - option will default to ``V1``. - V1 (1): - Legacy content categories of our initial - launch in 2017. - V2 (2): - Updated content categories in 2022.
- """ - CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 - V1 = 1 - V2 = 2 - - content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( - proto.ENUM, - number=1, - enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', - ) - - v1_model: V1Model = proto.Field( - proto.MESSAGE, - number=1, - oneof='model_type', - message=V1Model, - ) - v2_model: V2Model = proto.Field( - proto.MESSAGE, - number=2, - oneof='model_type', - message=V2Model, - ) - - -class AnalyzeSentimentRequest(proto.Message): - r"""The sentiment analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSentimentResponse(proto.Message): - r"""The sentiment analysis response message. - - Attributes: - document_sentiment (google.cloud.language_v1.types.Sentiment): - The overall sentiment of the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - The sentiment for all the sentences in the - document. - """ - - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=1, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Sentence', - ) - - -class AnalyzeEntitySentimentRequest(proto.Message): - r"""The entity-level sentiment analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitySentimentResponse(proto.Message): - r"""The entity-level sentiment analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - The recognized entities in the input document - with associated sentiments. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeEntitiesRequest(proto.Message): - r"""The entity analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. 
- """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitiesResponse(proto.Message): - r"""The entity analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - The recognized entities in the input - document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeSyntaxRequest(proto.Message): - r"""The syntax analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSyntaxResponse(proto.Message): - r"""The syntax analysis response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - Sentences in the input document. - tokens (MutableSequence[google.cloud.language_v1.types.Token]): - Tokens, along with their syntactic - information, in the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - language: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ClassifyTextRequest(proto.Message): - r"""The document classification request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): - Model options to use for classification. - Defaults to v1 options if not specified. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='ClassificationModelOptions', - ) - - -class ClassifyTextResponse(proto.Message): - r"""The document classification response message. - - Attributes: - categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Categories representing the input document. - """ - - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClassificationCategory', - ) - - -class AnnotateTextRequest(proto.Message): - r"""The request message for the text annotation API, which can - perform multiple analysis types (sentiment, entities, and - syntax) in one call. 
- - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - features (google.cloud.language_v1.types.AnnotateTextRequest.Features): - Required. The enabled features. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - class Features(proto.Message): - r"""All available features for sentiment, syntax, and semantic - analysis. Setting each one to true will enable that specific - analysis for the input. - - Attributes: - extract_syntax (bool): - Extract syntax information. - extract_entities (bool): - Extract entities. - extract_document_sentiment (bool): - Extract document-level sentiment. - extract_entity_sentiment (bool): - Extract entities and their associated - sentiment. - classify_text (bool): - Classify the full document into categories. - classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): - The model options to use for classification. Defaults to v1 - options if not specified. Only used if ``classify_text`` is - set to true. - """ - - extract_syntax: bool = proto.Field( - proto.BOOL, - number=1, - ) - extract_entities: bool = proto.Field( - proto.BOOL, - number=2, - ) - extract_document_sentiment: bool = proto.Field( - proto.BOOL, - number=3, - ) - extract_entity_sentiment: bool = proto.Field( - proto.BOOL, - number=4, - ) - classify_text: bool = proto.Field( - proto.BOOL, - number=6, - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=10, - message='ClassificationModelOptions', - ) - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - features: Features = proto.Field( - proto.MESSAGE, - number=2, - message=Features, - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=3, - enum='EncodingType', - ) - - -class AnnotateTextResponse(proto.Message): - r"""The text annotations response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - Sentences in the input document. Populated if the user - enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. - tokens (MutableSequence[google.cloud.language_v1.types.Token]): - Tokens, along with their syntactic information, in the input - document. Populated if the user enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - Entities, along with their semantic information, in the - input document. Populated if the user enables - [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. - document_sentiment (google.cloud.language_v1.types.Sentiment): - The overall sentiment for the document. Populated if the - user enables - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Categories identified in the input document. 
- """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Entity', - ) - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=4, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=5, - ) - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ClassificationCategory', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index b104aa2e..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/language_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py deleted file mode 100644 index 71f2d049..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py deleted file mode 100644 index 14beb557..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py deleted file mode 100644 index a8a1b59b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
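[Editor's sketch] Every generated sample in this hunk inlines text via `document.content`. The same Document message also accepts a Cloud Storage object; a sketch, where the bucket and object name are placeholders rather than anything from this patch:

    document = language_v1.Document(
        gcs_content_uri="gs://my-bucket/article.txt",  # hypothetical object
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )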
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py deleted file mode 100644 index c6d27ac8..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py deleted file mode 100644 index 6b65f274..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
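[Editor's sketch] For the AnalyzeEntitySentiment samples above, the useful part of the response is the per-entity sentiment. A short sketch, assuming `response` is the AnalyzeEntitySentimentResponse returned by either sample:

    for entity in response.entities:
        # score is polarity in [-1.0, 1.0]; magnitude is its overall strength
        print(entity.name, entity.sentiment.score, entity.sentiment.magnitude)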
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py deleted file mode 100644 index c9a48df7..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py deleted file mode 100644 index 31640e52..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSyntax_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
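[Editor's sketch] The AnalyzeSentiment samples above print the raw response; in practice the document- and sentence-level scores are what callers consume. A sketch, assuming `response` is the AnalyzeSentimentResponse from either sample:

    doc = response.document_sentiment
    print(f"document: score={doc.score:+.2f} magnitude={doc.magnitude:.2f}")
    for sentence in response.sentences:
        print(sentence.text.content, sentence.sentiment.score)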
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = await client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py deleted file mode 100644 index 947613db..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSyntax_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py deleted file mode 100644 index 02a54aee..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnnotateText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
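[Editor's sketch] Likewise for the AnalyzeSyntax samples above, the response carries one Token per word. A sketch of pulling out the commonly used token attributes, assuming `response` is the AnalyzeSyntaxResponse:

    for token in response.tokens:
        print(
            token.text.content,
            token.part_of_speech.tag,    # e.g. PartOfSpeech.Tag.NOUN
            token.lemma,
            token.dependency_edge.label,
        )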
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = await client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py deleted file mode 100644 index 9d90a0f0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnnotateText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py deleted file mode 100644 index a6497c09..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_ClassifyText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
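[Editor's sketch] As generated, the AnnotateText samples above build a request with no Features flags set, so no analyses are actually selected. A sketch of selecting them explicitly; the flag names are from the v1 `AnnotateTextRequest.Features` message:

    request = language_v1.AnnotateTextRequest(
        document=document,
        features=language_v1.AnnotateTextRequest.Features(
            extract_entities=True,
            extract_document_sentiment=True,
            classify_text=True,
        ),
    )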
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = await client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py deleted file mode 100644 index e1d32646..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_ClassifyText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json deleted file mode 100644 index 936a8b70..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json +++ /dev/null @@ -1,1029 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.language.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-language", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1_generated_language_service_analyze_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": 
"LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1_generated_language_service_analyze_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1_generated_language_service_analyze_entity_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entity_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": 
"google.cloud.language_v1.LanguageServiceClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1_generated_language_service_analyze_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"language_v1_generated_language_service_analyze_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1_generated_language_service_analyze_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1_generated_language_service_analyze_syntax_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_syntax_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1_generated_language_service_analyze_syntax_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_syntax_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1_generated_language_service_annotate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnnotateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_annotate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1_generated_language_service_annotate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnnotateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_annotate_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1_generated_language_service_classify_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ClassifyText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": 
"FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_classify_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1_generated_language_service_classify_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ClassifyText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_classify_text_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py deleted file mode 100644 index fc15df57..00000000 --- a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py +++ /dev/null @@ -1,181 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class languageCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_entities': ('document', 'encoding_type', ), - 'analyze_entity_sentiment': ('document', 'encoding_type', ), - 'analyze_sentiment': ('document', 'encoding_type', ), - 'analyze_syntax': ('document', 'encoding_type', ), - 'annotate_text': ('document', 'features', 'encoding_type', ), - 'classify_text': ('document', 'classification_model_options', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=languageCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the language client library. 
- -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 047e5bce..00000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-language' - - -description = "Google Cloud Language API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-language" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py deleted file mode 100644 index 80a2a6a3..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py +++ /dev/null @@ -1,3674 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.language_v1.services.language_service import LanguageServiceAsyncClient -from google.cloud.language_v1.services.language_service import LanguageServiceClient -from google.cloud.language_v1.services.language_service import transports -from google.cloud.language_v1.types import language_service -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LanguageServiceClient._get_default_mtls_endpoint(None) is None - assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - 
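The assertions above pin down a pure string transformation. As a behavioral sketch only
(the real implementation is generated onto LanguageServiceClient), a function satisfying
exactly those six assertions could look like:

    def default_mtls_endpoint(endpoint):
        # None, already-mTLS, and non-Google hosts pass through unchanged.
        if not endpoint or '.mtls.' in endpoint or not endpoint.endswith('.googleapis.com'):
            return endpoint
        # example.sandbox.googleapis.com -> example.mtls.sandbox.googleapis.com
        if endpoint.endswith('.sandbox.googleapis.com'):
            return endpoint.replace('.sandbox.googleapis.com', '.mtls.sandbox.googleapis.com')
        # example.googleapis.com -> example.mtls.googleapis.com
        return endpoint.replace('.googleapis.com', '.mtls.googleapis.com')

The same rule explains modify_default_endpoint above: pointing DEFAULT_ENDPOINT at
foo.googleapis.com guarantees the derived mTLS endpoint differs, which a localhost
default would not.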
-@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LanguageServiceGrpcTransport, "grpc"), - (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - -def test_language_service_client_get_transport_class(): - transport = LanguageServiceClient.get_transport_class() - available_transports = [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceRestTransport, - ] - assert transport in available_transports - - transport = LanguageServiceClient.get_transport_class("grpc") - assert transport == transports.LanguageServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError):
-            client = client_class(transport=transport_name)
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"),
-])
-@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient))
-@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - LanguageServiceClient, LanguageServiceAsyncClient -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), -]) -def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_language_service_client_client_options_from_dict(): - with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = LanguageServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "language.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="language.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeSentimentRequest, - dict, -]) -def test_analyze_sentiment(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSentimentResponse( - language='language_value', - ) - response = client.analyze_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_sentiment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_sentiment), - '__call__') as call: - client.analyze_sentiment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSentimentRequest() - -@pytest.mark.asyncio -async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse( - language='language_value', - )) - response = await client.analyze_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSentimentResponse) - assert response.language == 'language_value' - - -@pytest.mark.asyncio -async def test_analyze_sentiment_async_from_dict(): - await test_analyze_sentiment_async(request_type=dict) - - -def test_analyze_sentiment_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSentimentResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_sentiment( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - - -def test_analyze_sentiment_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - -@pytest.mark.asyncio -async def test_analyze_sentiment_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSentimentResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.analyze_sentiment( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_sentiment_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - response = client.analyze_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - client.analyze_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - -@pytest.mark.asyncio -async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse( - language='language_value', - )) - response = await client.analyze_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -@pytest.mark.asyncio -async def test_analyze_entities_async_from_dict(): - await test_analyze_entities_async(request_type=dict) - - -def test_analyze_entities_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_entities( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - - -def test_analyze_entities_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - -@pytest.mark.asyncio -async def test_analyze_entities_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.analyze_entities( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_entities_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - response = client.analyze_entity_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - client.analyze_entity_sentiment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse( - language='language_value', - )) - response = await client.analyze_entity_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_async_from_dict(): - await test_analyze_entity_sentiment_async(request_type=dict) - - -def test_analyze_entity_sentiment_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_entity_sentiment( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - - -def test_analyze_entity_sentiment_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entity_sentiment( - language_service.AnalyzeEntitySentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.analyze_entity_sentiment( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_entity_sentiment( - language_service.AnalyzeEntitySentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeSyntaxRequest, - dict, -]) -def test_analyze_syntax(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_syntax), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSyntaxResponse( - language='language_value', - ) - response = client.analyze_syntax(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSyntaxRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSyntaxResponse) - assert response.language == 'language_value' - - -def test_analyze_syntax_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_syntax), - '__call__') as call: - client.analyze_syntax() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSyntaxRequest() - -@pytest.mark.asyncio -async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_syntax), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse( - language='language_value', - )) - response = await client.analyze_syntax(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeSyntaxRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSyntaxResponse) - assert response.language == 'language_value' - - -@pytest.mark.asyncio -async def test_analyze_syntax_async_from_dict(): - await test_analyze_syntax_async(request_type=dict) - - -def test_analyze_syntax_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_syntax), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSyntaxResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_syntax( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - - -def test_analyze_syntax_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_syntax( - language_service.AnalyzeSyntaxRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - -@pytest.mark.asyncio -async def test_analyze_syntax_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_syntax), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeSyntaxResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.analyze_syntax( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_syntax_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.analyze_syntax( - language_service.AnalyzeSyntaxRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.ClassifyTextRequest, - dict, -]) -def test_classify_text(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.classify_text), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.ClassifyTextResponse( - ) - response = client.classify_text(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.ClassifyTextRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.ClassifyTextResponse) - - -def test_classify_text_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.classify_text), - '__call__') as call: - client.classify_text() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.ClassifyTextRequest() - -@pytest.mark.asyncio -async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.classify_text), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse( - )) - response = await client.classify_text(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.ClassifyTextRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.ClassifyTextResponse) - - -@pytest.mark.asyncio -async def test_classify_text_async_from_dict(): - await test_classify_text_async(request_type=dict) - - -def test_classify_text_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.classify_text), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = language_service.ClassifyTextResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.classify_text( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - - -def test_classify_text_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.classify_text( - language_service.ClassifyTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - -@pytest.mark.asyncio -async def test_classify_text_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.classify_text), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.ClassifyTextResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.classify_text( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_classify_text_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.classify_text( - language_service.ClassifyTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnnotateTextRequest, - dict, -]) -def test_annotate_text(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.annotate_text), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnnotateTextResponse( - language='language_value', - ) - response = client.annotate_text(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-def test_annotate_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        client.annotate_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-@pytest.mark.asyncio
-async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
-            language='language_value',
-        ))
-        response = await client.annotate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_annotate_text_async_from_dict():
-    await test_annotate_text_async(request_type=dict)
-
-
-def test_annotate_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnnotateTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.annotate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
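-        # (Each flattened kwarg is folded into a single AnnotateTextRequest;
-        # the asserts below read every field back off args[0], the request
-        # proto captured by the mocked stub.)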
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].features - mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - - -def test_annotate_text_flattened_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.annotate_text( - language_service.AnnotateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - -@pytest.mark.asyncio -async def test_annotate_text_flattened_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.annotate_text), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnnotateTextResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.annotate_text( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].features - mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_annotate_text_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.annotate_text( - language_service.AnnotateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeSentimentRequest, - dict, -]) -def test_analyze_sentiment_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
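-    # (None of the REST tests below touch the network: requests.Session.request
-    # is patched out and handed a prebuilt Response whose body is the
-    # JSON-serialized protobuf, which the transport parses back into the
-    # expected response type.)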
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_sentiment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
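-            # (The stub handed to transcode() below mirrors the shape of the
-            # real helper's return value: a dict carrying the resolved 'uri',
-            # the HTTP 'method', the 'query_params', and an optional 'body'.)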
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_sentiment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_sentiment_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_sentiment_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
-
-        request = language_service.AnalyzeSentimentRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeSentimentResponse()
-
-        client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
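-    # (google.api_core maps the 400 status on the mocked response to
-    # core_exceptions.BadRequest, which pytest.raises expects below.)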
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_sentiment(request) - - -def test_analyze_sentiment_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_sentiment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:analyzeSentiment" % client.transport._host, args[1]) - - -def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_sentiment_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_entities(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitiesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entities(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entities_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_entities._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entities_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
-
-        request = language_service.AnalyzeEntitiesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitiesResponse()
-
-        client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_entities(request) - - -def test_analyze_entities_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitiesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_entities(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:analyzeEntities" % client.transport._host, args[1]) - - -def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_entities_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_entity_sentiment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitySentimentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entity_sentiment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entity_sentiment_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
-
-        request = language_service.AnalyzeEntitySentimentRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitySentimentResponse()
-
-        client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_entity_sentiment(request) - - -def test_analyze_entity_sentiment_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitySentimentResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_entity_sentiment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:analyzeEntitySentiment" % client.transport._host, args[1]) - - -def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entity_sentiment( - language_service.AnalyzeEntitySentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_entity_sentiment_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeSyntaxRequest, - dict, -]) -def test_analyze_syntax_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = language_service.AnalyzeSyntaxResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_syntax(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSyntaxResponse) - assert response.language == 'language_value' - - -def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSyntaxResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_syntax(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_syntax_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_syntax._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_syntax_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
-
-        request = language_service.AnalyzeSyntaxRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeSyntaxResponse()
-
-        client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_syntax(request) - - -def test_analyze_syntax_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSyntaxResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_syntax(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:analyzeSyntax" % client.transport._host, args[1]) - - -def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_syntax( - language_service.AnalyzeSyntaxRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_syntax_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.ClassifyTextRequest, - dict, -]) -def test_classify_text_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ClassifyTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.classify_text(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, language_service.ClassifyTextResponse) - - -def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.classify_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_classify_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.classify_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_classify_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
-
-        request = language_service.ClassifyTextRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.ClassifyTextResponse()
-
-        client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.classify_text(request) - - -def test_classify_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ClassifyTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.classify_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:classifyText" % client.transport._host, args[1]) - - -def test_classify_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.classify_text( - language_service.ClassifyTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - -def test_classify_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnnotateTextRequest, - dict, -]) -def test_annotate_text_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.annotate_text(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, language_service.AnnotateTextResponse) - assert response.language == 'language_value' - - -def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.annotate_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_annotate_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.annotate_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", "features", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_annotate_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
-        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
-
-        request = language_service.AnnotateTextRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnnotateTextResponse()
-
-        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.annotate_text(request) - - -def test_annotate_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.annotate_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:annotateText" % client.transport._host, args[1]) - - -def test_annotate_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.annotate_text( - language_service.AnnotateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_annotate_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LanguageServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LanguageServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = LanguageServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LanguageServiceGrpcTransport, - ) - -def test_language_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_language_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'analyze_sentiment', - 'analyze_entities', - 'analyze_entity_sentiment', - 'analyze_syntax', - 'classify_text', - 'annotate_text', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_language_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport() - adc.assert_called_once() - - -def test_language_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LanguageServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_language_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, - ], -) -def test_language_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LanguageServiceGrpcTransport, grpc_helpers), - (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_language_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "language.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="language.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_language_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LanguageServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_no_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_with_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_language_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LanguageServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LanguageServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.analyze_sentiment._session - session2 = client2.transport.analyze_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_entities._session - session2 = client2.transport.analyze_entities._session - assert session1 != session2 - session1 = client1.transport.analyze_entity_sentiment._session - session2 = client2.transport.analyze_entity_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_syntax._session - session2 = client2.transport.analyze_syntax._session - assert session1 != session2 - session1 = client1.transport.classify_text._session - session2 = client2.transport.classify_text._session - assert session1 != session2 - session1 = client1.transport.annotate_text._session - session2 = client2.transport.annotate_text._session - assert session1 != session2 -def test_language_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
-    transport = transports.LanguageServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_language_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.LanguageServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
-def test_language_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LanguageServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = LanguageServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = LanguageServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = LanguageServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LanguageServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = LanguageServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = LanguageServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = LanguageServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = LanguageServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LanguageServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = LanguageServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = LanguageServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta2/.coveragerc b/owl-bot-staging/v1beta2/.coveragerc deleted file mode 100644 index c1f51536..00000000 --- a/owl-bot-staging/v1beta2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/language/__init__.py - google/cloud/language/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1beta2/.flake8 b/owl-bot-staging/v1beta2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1beta2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1beta2/MANIFEST.in b/owl-bot-staging/v1beta2/MANIFEST.in deleted file mode 100644 index dcc097e7..00000000 --- a/owl-bot-staging/v1beta2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/language *.py -recursive-include google/cloud/language_v1beta2 *.py diff --git a/owl-bot-staging/v1beta2/README.rst b/owl-bot-staging/v1beta2/README.rst deleted file mode 100644 index 0c5f1b6b..00000000 --- a/owl-bot-staging/v1beta2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Language API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Language API. -4. 
`Set up Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1beta2/docs/conf.py b/owl-bot-staging/v1beta2/docs/conf.py
deleted file mode 100644
index 2e1b322d..00000000
--- a/owl-bot-staging/v1beta2/docs/conf.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-language documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-language"
-copyright = u"2022, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-language-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-language.tex", - u"google-cloud-language Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-language", - u"Google Cloud Language Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-language", - u"google-cloud-language Documentation", - author, - "google-cloud-language", - "GAPIC library for Google Cloud Language API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta2/docs/index.rst b/owl-bot-staging/v1beta2/docs/index.rst deleted file mode 100644 index 42b8e680..00000000 --- a/owl-bot-staging/v1beta2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - language_v1beta2/services - language_v1beta2/types diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst deleted file mode 100644 index 799a7892..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -LanguageService ---------------------------------- - -.. automodule:: google.cloud.language_v1beta2.services.language_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst deleted file mode 100644 index 40ead585..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Language v1beta2 API -============================================== -.. toctree:: - :maxdepth: 2 - - language_service diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst deleted file mode 100644 index 2e834e61..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Language v1beta2 API -=========================================== - -.. automodule:: google.cloud.language_v1beta2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py deleted file mode 100644 index 6bfa0911..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.language import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.language_v1beta2.services.language_service.client import LanguageServiceClient -from google.cloud.language_v1beta2.services.language_service.async_client import LanguageServiceAsyncClient - -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxResponse -from google.cloud.language_v1beta2.types.language_service import AnnotateTextRequest -from google.cloud.language_v1beta2.types.language_service import AnnotateTextResponse -from google.cloud.language_v1beta2.types.language_service import ClassificationCategory -from google.cloud.language_v1beta2.types.language_service import ClassificationModelOptions -from google.cloud.language_v1beta2.types.language_service import ClassifyTextRequest -from google.cloud.language_v1beta2.types.language_service import ClassifyTextResponse -from google.cloud.language_v1beta2.types.language_service import DependencyEdge -from google.cloud.language_v1beta2.types.language_service import Document -from google.cloud.language_v1beta2.types.language_service import Entity -from google.cloud.language_v1beta2.types.language_service import EntityMention -from google.cloud.language_v1beta2.types.language_service import ModerateTextRequest -from google.cloud.language_v1beta2.types.language_service import ModerateTextResponse -from google.cloud.language_v1beta2.types.language_service import PartOfSpeech -from google.cloud.language_v1beta2.types.language_service import Sentence -from google.cloud.language_v1beta2.types.language_service import Sentiment -from google.cloud.language_v1beta2.types.language_service import TextSpan -from google.cloud.language_v1beta2.types.language_service import Token -from google.cloud.language_v1beta2.types.language_service import EncodingType - -__all__ = ('LanguageServiceClient', - 'LanguageServiceAsyncClient', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- 
a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language/py.typed b/owl-bot-staging/v1beta2/google/cloud/language/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py deleted file mode 100644 index e6a87024..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.language_v1beta2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.language_service import LanguageServiceClient -from .services.language_service import LanguageServiceAsyncClient - -from .types.language_service import AnalyzeEntitiesRequest -from .types.language_service import AnalyzeEntitiesResponse -from .types.language_service import AnalyzeEntitySentimentRequest -from .types.language_service import AnalyzeEntitySentimentResponse -from .types.language_service import AnalyzeSentimentRequest -from .types.language_service import AnalyzeSentimentResponse -from .types.language_service import AnalyzeSyntaxRequest -from .types.language_service import AnalyzeSyntaxResponse -from .types.language_service import AnnotateTextRequest -from .types.language_service import AnnotateTextResponse -from .types.language_service import ClassificationCategory -from .types.language_service import ClassificationModelOptions -from .types.language_service import ClassifyTextRequest -from .types.language_service import ClassifyTextResponse -from .types.language_service import DependencyEdge -from .types.language_service import Document -from .types.language_service import Entity -from .types.language_service import EntityMention -from .types.language_service import ModerateTextRequest -from .types.language_service import ModerateTextResponse -from .types.language_service import PartOfSpeech -from .types.language_service import Sentence -from .types.language_service import Sentiment -from .types.language_service import TextSpan -from .types.language_service import Token -from .types.language_service import EncodingType - -__all__ = ( - 'LanguageServiceAsyncClient', -'AnalyzeEntitiesRequest', -'AnalyzeEntitiesResponse', -'AnalyzeEntitySentimentRequest', -'AnalyzeEntitySentimentResponse', -'AnalyzeSentimentRequest', -'AnalyzeSentimentResponse', -'AnalyzeSyntaxRequest', -'AnalyzeSyntaxResponse', -'AnnotateTextRequest', -'AnnotateTextResponse', -'ClassificationCategory', -'ClassificationModelOptions', -'ClassifyTextRequest', -'ClassifyTextResponse', -'DependencyEdge', -'Document', -'EncodingType', -'Entity', -'EntityMention', -'LanguageServiceClient', -'ModerateTextRequest', -'ModerateTextResponse', -'PartOfSpeech', -'Sentence', -'Sentiment', -'TextSpan', -'Token', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json deleted file mode 100644 index 85a901f9..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json +++ /dev/null @@ -1,133 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.language_v1beta2", - "protoPackage": "google.cloud.language.v1beta2", - "schema": "1.0", - "services": { - "LanguageService": { - "clients": { - "grpc": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - }, - "grpc-async": { - 
"libraryClient": "LanguageServiceAsyncClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - }, - "rest": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py deleted file mode 100644 index 6e5f9052..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import LanguageServiceClient -from .async_client import LanguageServiceAsyncClient - -__all__ = ( - 'LanguageServiceClient', - 'LanguageServiceAsyncClient', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py deleted file mode 100644 index a3a46b78..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py +++ /dev/null @@ -1,963 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.language_v1beta2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .client import LanguageServiceClient - - -class LanguageServiceAsyncClient: - """Provides text analysis operations such as sentiment analysis - and entity recognition. 
- """ - - _client: LanguageServiceClient - - DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) - common_project_path = staticmethod(LanguageServiceClient.common_project_path) - parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) - common_location_path = staticmethod(LanguageServiceClient.common_location_path) - parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> LanguageServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            LanguageServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, LanguageServiceTransport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the language service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.LanguageServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = LanguageServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def analyze_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSentimentResponse:
-        r"""Analyzes the sentiment of the provided text.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_analyze_sentiment():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeSentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]]):
-                The request object. The sentiment analysis request
-                message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`):
-                The encoding type used by the API to
-                calculate sentence offsets for the
-                sentence sentiment.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeSentimentResponse:
-                The sentiment analysis response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeSentimentRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_sentiment,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_entities(self,
-            request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitiesResponse:
-        r"""Finds named entities (currently proper names and
-        common nouns) in the text along with entity types,
-        salience, mentions for each entity, and other
-        properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_analyze_entities():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeEntitiesRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_entities(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]]):
-                The request object. The entity analysis request message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse:
-                The entity analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeEntitiesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entities,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_entity_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitySentimentResponse:
-        r"""Finds entities, similar to
-        [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities]
-        in the text and analyzes sentiment associated with each entity
-        and its mentions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_analyze_entity_sentiment():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeEntitySentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_entity_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]]):
-                The request object. The entity-level sentiment analysis
-                request message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse:
-                The entity-level sentiment analysis
-                response message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeEntitySentimentRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entity_sentiment,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_syntax(self,
-            request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSyntaxResponse:
-        r"""Analyzes the syntax of the text and provides sentence
-        boundaries and tokenization along with part of speech
-        tags, dependency trees, and other properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_analyze_syntax():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeSyntaxRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_syntax(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]]):
-                The request object. The syntax analysis request message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse:
-                The syntax analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeSyntaxRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_syntax,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def classify_text(self,
-            request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.ClassifyTextResponse:
-        r"""Classifies a document into categories.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_classify_text():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.ClassifyTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.classify_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]]):
-                The request object. The document classification request
-                message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.ClassifyTextResponse:
-                The document classification response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.ClassifyTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.classify_text,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def moderate_text(self,
-            request: Optional[Union[language_service.ModerateTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.ModerateTextResponse:
-        r"""Moderates a document for harmful and sensitive
-        categories.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_moderate_text():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.ModerateTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.moderate_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]]):
-                The request object. The document moderation request
-                message.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.ModerateTextResponse:
-                The document moderation response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.ModerateTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.moderate_text,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def annotate_text(self,
-            request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            features: Optional[language_service.AnnotateTextRequest.Features] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnnotateTextResponse:
-        r"""A convenience method that provides all syntax,
-        sentiment, entity, and classification features in one
-        call.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            async def sample_annotate_text():
-                # Create a client
-                client = language_v1beta2.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnnotateTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.annotate_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]]):
-                The request object. The request message for the text
-                annotation API, which can perform
-                multiple analysis types (sentiment,
-                entities, and syntax) in one call.
-            document (:class:`google.cloud.language_v1beta2.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            features (:class:`google.cloud.language_v1beta2.types.AnnotateTextRequest.Features`):
-                Required. The enabled features.
-                This corresponds to the ``features`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnnotateTextResponse:
-                The text annotations response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, features, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnnotateTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if features is not None:
-            request.features = features
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.annotate_text,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.DeadlineExceeded,
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self):
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "LanguageServiceAsyncClient",
-)
diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py
deleted file mode 100644
index 9093d5b6..00000000
--- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py
+++ /dev/null
@@ -1,1116 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-
-from google.cloud.language_v1beta2 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object]  # type: ignore
-
-from google.cloud.language_v1beta2.types import language_service
-from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import LanguageServiceGrpcTransport
-from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
-from .transports.rest import LanguageServiceRestTransport
-
-
-class LanguageServiceClientMeta(type):
-    """Metaclass for the LanguageService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
-    _transport_registry["grpc"] = LanguageServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = LanguageServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[LanguageServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
-    """Provides text analysis operations such as sentiment analysis
-    and entity recognition.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "language.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            LanguageServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            LanguageServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> LanguageServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            LanguageServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, LanguageServiceTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the language service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, LanguageServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        if isinstance(client_options, dict):
-            client_options = client_options_lib.from_dict(client_options)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        client_options = cast(client_options_lib.ClientOptions, client_options)
-
-        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)
-
-        api_key_value = getattr(client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        if isinstance(transport, LanguageServiceTransport):
-            # transport is a LanguageServiceTransport instance.
-            if credentials or client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
-                )
-            self._transport = transport
-        else:
-            import google.auth._default  # type: ignore
-
-            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
-                credentials = google.auth._default.get_api_key_credentials(api_key_value)
-
-            Transport = type(self).get_transport_class(transport)
-            self._transport = Transport(
-                credentials=credentials,
-                credentials_file=client_options.credentials_file,
-                host=api_endpoint,
-                scopes=client_options.scopes,
-                client_cert_source_for_mtls=client_cert_source_func,
-                quota_project_id=client_options.quota_project_id,
-                client_info=client_info,
-                always_use_jwt_access=True,
-                api_audience=client_options.api_audience,
-            )
-
-    def analyze_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSentimentResponse:
-        r"""Analyzes the sentiment of the provided text.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            def sample_analyze_sentiment():
-                # Create a client
-                client = language_v1beta2.LanguageServiceClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeSentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = client.analyze_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]):
-                The request object. The sentiment analysis request
-                message.
-            document (google.cloud.language_v1beta2.types.Document):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-                The encoding type used by the API to
-                calculate sentence offsets for the
-                sentence sentiment.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeSentimentResponse:
-                The sentiment analysis response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a language_service.AnalyzeSentimentRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, language_service.AnalyzeSentimentRequest):
-            request = language_service.AnalyzeSentimentRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if document is not None:
-                request.document = document
-            if encoding_type is not None:
-                request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment]
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def analyze_entities(self,
-            request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitiesResponse:
-        r"""Finds named entities (currently proper names and
-        common nouns) in the text along with entity types,
-        salience, mentions for each entity, and other
-        properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            def sample_analyze_entities():
-                # Create a client
-                client = language_v1beta2.LanguageServiceClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeEntitiesRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = client.analyze_entities(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]):
-                The request object. The entity analysis request message.
-            document (google.cloud.language_v1beta2.types.Document):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse:
-                The entity analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a language_service.AnalyzeEntitiesRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, language_service.AnalyzeEntitiesRequest):
-            request = language_service.AnalyzeEntitiesRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if document is not None:
-                request.document = document
-            if encoding_type is not None:
-                request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.analyze_entities]
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def analyze_entity_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitySentimentResponse:
-        r"""Finds entities, similar to
-        [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities]
-        in the text and analyzes sentiment associated with each entity
-        and its mentions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1beta2
-
-            def sample_analyze_entity_sentiment():
-                # Create a client
-                client = language_v1beta2.LanguageServiceClient()
-
-                # Initialize request argument(s)
-                document = language_v1beta2.Document()
-                document.content = "content_value"
-
-                request = language_v1beta2.AnalyzeEntitySentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = client.analyze_entity_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]):
-                The request object. The entity-level sentiment analysis
-                request message.
-            document (google.cloud.language_v1beta2.types.Document):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse:
-                The entity-level sentiment analysis
-                response message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a language_service.AnalyzeEntitySentimentRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, language_service.AnalyzeEntitySentimentRequest):
-            request = language_service.AnalyzeEntitySentimentRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
- if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_syntax(self, - request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_analyze_syntax(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]): - The request object. The syntax analysis request message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSyntaxRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeSyntaxRequest): - request = language_service.AnalyzeSyntaxRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def classify_text(self, - request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ClassifyTextResponse: - r"""Classifies a document into categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_classify_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]): - The request object. The document classification request - message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.ClassifyTextResponse: - The document classification response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ClassifyTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
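-        # Illustrative note, not generated code: a plain dict is accepted and
-        # coerced the same way, e.g.
-        #     client.classify_text(request={"document": {"content": "..."}})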
- if not isinstance(request, language_service.ClassifyTextRequest): - request = language_service.ClassifyTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.classify_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def moderate_text(self, - request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ModerateTextResponse: - r"""Moderates a document for harmful and sensitive - categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_moderate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ModerateTextRequest( - document=document, - ) - - # Make the request - response = client.moderate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]): - The request object. The document moderation request - message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.ModerateTextResponse: - The document moderation response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ModerateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.ModerateTextRequest): - request = language_service.ModerateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.moderate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def annotate_text(self, - request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - features: Optional[language_service.AnnotateTextRequest.Features] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnnotateTextResponse: - r"""A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_annotate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): - Required. The enabled features. - This corresponds to the ``features`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnnotateTextResponse: - The text annotations response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
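-        # Illustrative note, not generated code (``doc``/``feats`` are placeholder
-        # objects): pass either a fully formed request,
-        #     client.annotate_text(request={"document": doc, "features": feats})
-        # or the flattened fields,
-        #     client.annotate_text(document=doc, features=feats)
-        # but not both; mixing them raises the ValueError below.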
- has_flattened_params = any([document, features, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnnotateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnnotateTextRequest): - request = language_service.AnnotateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if features is not None: - request.features = features - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.annotate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "LanguageServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "LanguageServiceClient", -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py deleted file mode 100644 index 3cb6ab92..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LanguageServiceTransport -from .grpc import LanguageServiceGrpcTransport -from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .rest import LanguageServiceRestTransport -from .rest import LanguageServiceRestInterceptor - - -# Compile a registry of transports. 
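-# Illustrative note, not generated code: the registry keys are the strings a
-# caller may pass as the client's ``transport`` argument, e.g.
-#     LanguageServiceClient(transport="rest")
-# resolves to LanguageServiceRestTransport.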
-_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] -_transport_registry['grpc'] = LanguageServiceGrpcTransport -_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport -_transport_registry['rest'] = LanguageServiceRestTransport - -__all__ = ( - 'LanguageServiceTransport', - 'LanguageServiceGrpcTransport', - 'LanguageServiceGrpcAsyncIOTransport', - 'LanguageServiceRestTransport', - 'LanguageServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py deleted file mode 100644 index 99ee1db2..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py +++ /dev/null @@ -1,275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.language_v1beta2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.language_v1beta2.types import language_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class LanguageServiceTransport(abc.ABC): - """Abstract transport class for LanguageService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'language.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
-        quota_project_id (Optional[str]): An optional project to use for billing
-            and quota.
-        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-            The client info used to send a user-agent string along with
-            API requests. If ``None``, then default info will be used.
-            Generally, you only need to set this if you're developing
-            your own client library.
-        always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-            be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
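-        # Illustrative note, not generated code: the defaults below can still be
-        # overridden on a per-call basis, e.g.
-        #     client.analyze_sentiment(request=req, timeout=30.0)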
-        self._wrapped_methods = {
-            self.analyze_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entities: gapic_v1.method.wrap_method(
-                self.analyze_entities,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_entity_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_syntax: gapic_v1.method.wrap_method(
-                self.analyze_syntax,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.classify_text: gapic_v1.method.wrap_method(
-                self.classify_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.moderate_text: gapic_v1.method.wrap_method(
-                self.moderate_text,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.annotate_text: gapic_v1.method.wrap_method(
-                self.annotate_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-             Only call this method if the transport is NOT shared
-             with other clients - this may cause errors in other clients!
- """ - raise NotImplementedError() - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Union[ - language_service.AnalyzeSentimentResponse, - Awaitable[language_service.AnalyzeSentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Union[ - language_service.AnalyzeEntitiesResponse, - Awaitable[language_service.AnalyzeEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Union[ - language_service.AnalyzeEntitySentimentResponse, - Awaitable[language_service.AnalyzeEntitySentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Union[ - language_service.AnalyzeSyntaxResponse, - Awaitable[language_service.AnalyzeSyntaxResponse] - ]]: - raise NotImplementedError() - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Union[ - language_service.ClassifyTextResponse, - Awaitable[language_service.ClassifyTextResponse] - ]]: - raise NotImplementedError() - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Union[ - language_service.ModerateTextResponse, - Awaitable[language_service.ModerateTextResponse] - ]]: - raise NotImplementedError() - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Union[ - language_service.AnnotateTextResponse, - Awaitable[language_service.AnnotateTextResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'LanguageServiceTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py deleted file mode 100644 index 48b7cd8b..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO - - -class LanguageServiceGrpcTransport(LanguageServiceTransport): - """gRPC backend transport for LanguageService. 
-
-    Provides text analysis operations such as sentiment analysis
-    and entity recognition.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - ~.AnalyzeSentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - ~.AnalyzeEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - ~.AnalyzeEntitySentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
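-        # Illustrative note, not generated code: the stub is cached per transport,
-        # so repeated property access returns the same callable, e.g.
-        #     rpc = transport.analyze_entity_sentiment  # same object each time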
- if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - ~.AnalyzeSyntaxResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - ~.ClassifyTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - ~.ModerateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
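-        # Illustrative note, not generated code: over this synchronous transport
-        # the callable blocks until the RPC completes, e.g.
-        #     response = transport.moderate_text(request)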
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - Returns: - Callable[[~.AnnotateTextRequest], - ~.AnnotateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'LanguageServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py deleted file mode 100644 index 710e8bb5..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import LanguageServiceGrpcTransport - - -class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): - """gRPC AsyncIO backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Awaitable[language_service.AnalyzeSentimentResponse]]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - Awaitable[~.AnalyzeSentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Awaitable[language_service.AnalyzeEntitiesResponse]]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - Awaitable[~.AnalyzeEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Awaitable[language_service.AnalyzeEntitySentimentResponse]]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - Awaitable[~.AnalyzeEntitySentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Awaitable[language_service.AnalyzeSyntaxResponse]]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - Awaitable[~.AnalyzeSyntaxResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Awaitable[language_service.ClassifyTextResponse]]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - Awaitable[~.ClassifyTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Awaitable[language_service.ModerateTextResponse]]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - Awaitable[~.ModerateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
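-        # Illustrative note, not generated code: over this AsyncIO transport the
-        # callable returns an awaitable, e.g.
-        #     response = await transport.moderate_text(request)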
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Awaitable[language_service.AnnotateTextResponse]]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - Returns: - Callable[[~.AnnotateTextRequest], - Awaitable[~.AnnotateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'LanguageServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py deleted file mode 100644 index 9696c821..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py +++ /dev/null @@ -1,1029 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
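The async transport deleted above builds one gRPC callable per RPC the first time its property is accessed and caches it in `self._stubs`. The following standalone sketch shows that lazy stub-caching pattern in isolation; `FakeChannel` and `MiniTransport` are hypothetical stand-ins invented for illustration and are not part of the generated sources:

    # Sketch of the lazy per-RPC stub caching used by the transports above.
    # FakeChannel stands in for grpc.aio.Channel so the snippet runs
    # without a server or credentials.
    from typing import Any, Callable, Dict

    class FakeChannel:
        def unary_unary(self, method: str, request_serializer=None,
                        response_deserializer=None) -> Callable[[Any], Any]:
            # A real channel returns a callable that performs the RPC;
            # this stand-in just reports which method path would be hit.
            return lambda request: f"would call {method}"

    class MiniTransport:
        def __init__(self, channel: FakeChannel) -> None:
            self.grpc_channel = channel
            self._stubs: Dict[str, Callable[[Any], Any]] = {}

        @property
        def moderate_text(self) -> Callable[[Any], Any]:
            # Same guard as the generated code: build the stub on first
            # access, then serve every later access from the cache.
            if 'moderate_text' not in self._stubs:
                self._stubs['moderate_text'] = self.grpc_channel.unary_unary(
                    '/google.cloud.language.v1beta2.LanguageService/ModerateText',
                )
            return self._stubs['moderate_text']

    transport = MiniTransport(FakeChannel())
    assert transport.moderate_text is transport.moderate_text  # cached stub
    print(transport.moderate_text(None))

Caching the stubs on the instance means each RPC path is registered with the channel exactly once, and repeated property access is a cheap dictionary lookup.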
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.language_v1beta2.types import language_service - -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LanguageServiceRestInterceptor: - """Interceptor for LanguageService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LanguageServiceRestTransport. - - .. 
code-block:: python - class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): - def pre_analyze_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_entity_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entity_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_syntax(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_syntax(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_annotate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_annotate_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_classify_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_classify_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_moderate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_moderate_text(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) - client = LanguageServiceClient(transport=transport) - - - """ - def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: - """Post-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: - """Post-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. 
- """ - return response - def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: - """Post-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: - """Post-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for annotate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: - """Post-rpc interceptor for annotate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for classify_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: - """Post-rpc interceptor for classify_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for moderate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. 
- """ - return request, metadata - - def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: - """Post-rpc interceptor for moderate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LanguageServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LanguageServiceRestInterceptor - - -class LanguageServiceRestTransport(LanguageServiceTransport): - """REST backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LanguageServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or LanguageServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _AnalyzeEntities(LanguageServiceRestStub):
-        def __hash__(self):
-            return hash("AnalyzeEntities")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: language_service.AnalyzeEntitiesRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> language_service.AnalyzeEntitiesResponse:
-            r"""Call the analyze entities method over HTTP.
-
-            Args:
-                request (~.language_service.AnalyzeEntitiesRequest):
-                    The request object. The entity analysis request message.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.language_service.AnalyzeEntitiesResponse:
-                    The entity analysis response message.
-            """
-
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1beta2/documents:analyzeEntities',
-                'body': '*',
-            },
-            ]
-            request, metadata = self._interceptor.pre_analyze_entities(request, metadata)
-            pb_request = language_service.AnalyzeEntitiesRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True
-            )
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-
-            # Jsonify the query params
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True,
-            ))
-            query_params.update(self._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitiesResponse() - pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entities(resp) - return resp - - class _AnalyzeEntitySentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntitySentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitySentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Call the analyze entity sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitySentimentRequest): - The request object. The entity-level sentiment analysis - request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeEntitySentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) - pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitySentimentResponse() - pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entity_sentiment(resp) - return resp - - class _AnalyzeSentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSentimentResponse: - r"""Call the analyze sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeSentimentRequest): - The request object. The sentiment analysis request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeSentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) - pb_request = language_service.AnalyzeSentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSentimentResponse() - pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_sentiment(resp) - return resp - - class _AnalyzeSyntax(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSyntax") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSyntaxRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Call the analyze syntax method over HTTP. - - Args: - request (~.language_service.AnalyzeSyntaxRequest): - The request object. The syntax analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeSyntax', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) - pb_request = language_service.AnalyzeSyntaxRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSyntaxResponse() - pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_syntax(resp) - return resp - - class _AnnotateText(LanguageServiceRestStub): - def __hash__(self): - return hash("AnnotateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnnotateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnnotateTextResponse: - r"""Call the annotate text method over HTTP. - - Args: - request (~.language_service.AnnotateTextRequest): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnnotateTextResponse: - The text annotations response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:annotateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_annotate_text(request, metadata) - pb_request = language_service.AnnotateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnnotateTextResponse() - pb_resp = language_service.AnnotateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_annotate_text(resp) - return resp - - class _ClassifyText(LanguageServiceRestStub): - def __hash__(self): - return hash("ClassifyText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ClassifyTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ClassifyTextResponse: - r"""Call the classify text method over HTTP. - - Args: - request (~.language_service.ClassifyTextRequest): - The request object. The document classification request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ClassifyTextResponse: - The document classification response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:classifyText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_classify_text(request, metadata) - pb_request = language_service.ClassifyTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ClassifyTextResponse() - pb_resp = language_service.ClassifyTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_classify_text(resp) - return resp - - class _ModerateText(LanguageServiceRestStub): - def __hash__(self): - return hash("ModerateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ModerateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ModerateTextResponse: - r"""Call the moderate text method over HTTP. - - Args: - request (~.language_service.ModerateTextRequest): - The request object. The document moderation request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ModerateTextResponse: - The document moderation response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:moderateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_moderate_text(request, metadata) - pb_request = language_service.ModerateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ModerateTextResponse() - pb_resp = language_service.ModerateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_moderate_text(resp) - return resp - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LanguageServiceRestTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py deleted file mode 100644 index 8dadfa8a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
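For reference, here is a minimal sketch of wiring the REST transport deleted above with a custom interceptor, following the pattern shown in its own docstring. It assumes the published google-cloud-language package (including the v1beta2 ModerateText surface added by this change) is installed and that Application Default Credentials are available at runtime:

    import logging

    from google.cloud import language_v1beta2
    from google.cloud.language_v1beta2.services.language_service.transports.rest import (
        LanguageServiceRestInterceptor,
        LanguageServiceRestTransport,
    )

    logging.basicConfig(level=logging.INFO)

    class LoggingInterceptor(LanguageServiceRestInterceptor):
        """Logs one RPC; hooks not overridden fall back to the no-op base."""

        def pre_moderate_text(self, request, metadata):
            logging.info("ModerateText request: %s", request)
            return request, metadata

        def post_moderate_text(self, response):
            logging.info("ModerateText response: %s", response)
            return response

    # Inject the interceptor when constructing the transport, then hand
    # the transport to the client, exactly as the docstring above shows.
    transport = LanguageServiceRestTransport(interceptor=LoggingInterceptor())
    client = language_v1beta2.LanguageServiceClient(transport=transport)

    document = language_v1beta2.Document(
        content="Some text to screen.",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )
    response = client.moderate_text(request={"document": document})
    for category in response.moderation_categories:
        print(category.name, category.confidence)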
-# -from .language_service import ( - AnalyzeEntitiesRequest, - AnalyzeEntitiesResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeSyntaxRequest, - AnalyzeSyntaxResponse, - AnnotateTextRequest, - AnnotateTextResponse, - ClassificationCategory, - ClassificationModelOptions, - ClassifyTextRequest, - ClassifyTextResponse, - DependencyEdge, - Document, - Entity, - EntityMention, - ModerateTextRequest, - ModerateTextResponse, - PartOfSpeech, - Sentence, - Sentiment, - TextSpan, - Token, - EncodingType, -) - -__all__ = ( - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py deleted file mode 100644 index 3b27605f..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py +++ /dev/null @@ -1,1761 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.language.v1beta2', - manifest={ - 'EncodingType', - 'Document', - 'Sentence', - 'Entity', - 'Token', - 'Sentiment', - 'PartOfSpeech', - 'DependencyEdge', - 'EntityMention', - 'TextSpan', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - }, -) - - -class EncodingType(proto.Enum): - r"""Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens - and mentions, and languages that natively use different text - encodings may access offsets differently. - - Values: - NONE (0): - If ``EncodingType`` is not specified, encoding-dependent - information (such as ``begin_offset``) will be set at - ``-1``. 
-        UTF8 (1):
-            Encoding-dependent information (such as ``begin_offset``) is
-            calculated based on the UTF-8 encoding of the input. C++ and
-            Go are examples of languages that use this encoding
-            natively.
-        UTF16 (2):
-            Encoding-dependent information (such as ``begin_offset``) is
-            calculated based on the UTF-16 encoding of the input. Java
-            and JavaScript are examples of languages that use this
-            encoding natively.
-        UTF32 (3):
-            Encoding-dependent information (such as ``begin_offset``) is
-            calculated based on the UTF-32 encoding of the input. Python
-            is an example of a language that uses this encoding
-            natively.
-    """
-    NONE = 0
-    UTF8 = 1
-    UTF16 = 2
-    UTF32 = 3
-
-
-class Document(proto.Message):
-    r"""Represents the input to API methods.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        type_ (google.cloud.language_v1beta2.types.Document.Type):
-            Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
-            returns an ``INVALID_ARGUMENT`` error.
-        content (str):
-            The content of the input in string format.
-            Cloud audit logging exempt since it is based on
-            user data.
-
-            This field is a member of `oneof`_ ``source``.
-        gcs_content_uri (str):
-            The Google Cloud Storage URI where the file content is
-            located. This URI must be of the form:
-            gs://bucket_name/object_name. For more details, see
-            https://cloud.google.com/storage/docs/reference-uris. NOTE:
-            Cloud Storage object versioning is not supported.
-
-            This field is a member of `oneof`_ ``source``.
-        language (str):
-            The language of the document (if not specified, the language
-            is automatically detected). Both ISO and BCP-47 language
-            codes are accepted. `Language
-            Support <https://cloud.google.com/natural-language/docs/languages>`__
-            lists currently supported languages for each API method. If
-            the language (either specified by the caller or
-            automatically detected) is not supported by the called API
-            method, an ``INVALID_ARGUMENT`` error is returned.
-        reference_web_uri (str):
-            The web URI where the document comes from.
-            This URI is not used for fetching the content,
-            but as a hint for analyzing the document.
-        boilerplate_handling (google.cloud.language_v1beta2.types.Document.BoilerplateHandling):
-            Indicates how detected boilerplate (e.g.
-            advertisements, copyright declarations, banners)
-            should be handled for this document. If not
-            specified, boilerplate will be treated the same
-            as content.
-    """
-    class Type(proto.Enum):
-        r"""The document types enum.
-
-        Values:
-            TYPE_UNSPECIFIED (0):
-                The content type is not specified.
-            PLAIN_TEXT (1):
-                Plain text
-            HTML (2):
-                HTML
-        """
-        TYPE_UNSPECIFIED = 0
-        PLAIN_TEXT = 1
-        HTML = 2
-
-    class BoilerplateHandling(proto.Enum):
-        r"""Ways of handling boilerplate detected in the document.
-
-        Values:
-            BOILERPLATE_HANDLING_UNSPECIFIED (0):
-                The boilerplate handling is not specified.
-            SKIP_BOILERPLATE (1):
-                Do not analyze detected boilerplate.
-                Reference web URI is required for detecting
-                boilerplate.
-            KEEP_BOILERPLATE (2):
-                Treat boilerplate the same as content.
- """ - BOILERPLATE_HANDLING_UNSPECIFIED = 0 - SKIP_BOILERPLATE = 1 - KEEP_BOILERPLATE = 2 - - type_: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - content: str = proto.Field( - proto.STRING, - number=2, - oneof='source', - ) - gcs_content_uri: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - language: str = proto.Field( - proto.STRING, - number=4, - ) - reference_web_uri: str = proto.Field( - proto.STRING, - number=5, - ) - boilerplate_handling: BoilerplateHandling = proto.Field( - proto.ENUM, - number=6, - enum=BoilerplateHandling, - ) - - -class Sentence(proto.Message): - r"""Represents a sentence in the input document. - - Attributes: - text (google.cloud.language_v1beta2.types.TextSpan): - The sentence text. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeSentiment][] or if - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] - is set to true, this field will contain the sentiment for - the sentence. - """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=2, - message='Sentiment', - ) - - -class Entity(proto.Message): - r"""Represents a phrase in the text that is a known entity, such - as a person, an organization, or location. The API associates - information, such as salience and mentions, with entities. - - Attributes: - name (str): - The representative name for the entity. - type_ (google.cloud.language_v1beta2.types.Entity.Type): - The entity type. - metadata (MutableMapping[str, str]): - Metadata associated with the entity. - - For most entity types, the metadata is a Wikipedia URL - (``wikipedia_url``) and Knowledge Graph MID (``mid``), if - they are available. For the metadata associated with other - entity types, see the Type table below. - salience (float): - The salience score associated with the entity in the [0, - 1.0] range. - - The salience score for an entity provides information about - the importance or centrality of that entity to the entire - document text. Scores closer to 0 are less salient, while - scores closer to 1.0 are highly salient. - mentions (MutableSequence[google.cloud.language_v1beta2.types.EntityMention]): - The mentions of this entity in the input - document. The API currently supports proper noun - mentions. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the aggregate - sentiment expressed for this entity in the provided - document. - """ - class Type(proto.Enum): - r"""The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph - MID (``mid``). The table below lists the associated fields for - entities that have different metadata. 
-
-        Values:
-            UNKNOWN (0):
-                Unknown
-            PERSON (1):
-                Person
-            LOCATION (2):
-                Location
-            ORGANIZATION (3):
-                Organization
-            EVENT (4):
-                Event
-            WORK_OF_ART (5):
-                Artwork
-            CONSUMER_GOOD (6):
-                Consumer product
-            OTHER (7):
-                Other types of entities
-            PHONE_NUMBER (9):
-                Phone number
-
-                The metadata lists the phone number, formatted according to
-                local convention, plus whichever additional elements appear
-                in the text:
-
-                -  ``number`` - the actual number, broken down into sections
-                   as per local convention
-                -  ``national_prefix`` - country code, if detected
-                -  ``area_code`` - region or area code, if detected
-                -  ``extension`` - phone extension (to be dialed after
-                   connection), if detected
-            ADDRESS (10):
-                Address
-
-                The metadata identifies the street number and locality plus
-                whichever additional elements appear in the text:
-
-                -  ``street_number`` - street number
-                -  ``locality`` - city or town
-                -  ``street_name`` - street/route name, if detected
-                -  ``postal_code`` - postal code, if detected
-                -  ``country`` - country, if detected
-                -  ``broad_region`` - administrative area, such as the
-                   state, if detected
-                -  ``narrow_region`` - smaller administrative area, such as
-                   county, if detected
-                -  ``sublocality`` - used in Asian addresses to demark a
-                   district within a city, if detected
-            DATE (11):
-                Date
-
-                The metadata identifies the components of the date:
-
-                -  ``year`` - four digit year, if detected
-                -  ``month`` - two digit month number, if detected
-                -  ``day`` - two digit day number, if detected
-            NUMBER (12):
-                Number
-                The metadata is the number itself.
-            PRICE (13):
-                Price
-
-                The metadata identifies the ``value`` and ``currency``.
-        """
-        UNKNOWN = 0
-        PERSON = 1
-        LOCATION = 2
-        ORGANIZATION = 3
-        EVENT = 4
-        WORK_OF_ART = 5
-        CONSUMER_GOOD = 6
-        OTHER = 7
-        PHONE_NUMBER = 9
-        ADDRESS = 10
-        DATE = 11
-        NUMBER = 12
-        PRICE = 13
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    type_: Type = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=Type,
-    )
-    metadata: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-    salience: float = proto.Field(
-        proto.FLOAT,
-        number=4,
-    )
-    mentions: MutableSequence['EntityMention'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=5,
-        message='EntityMention',
-    )
-    sentiment: 'Sentiment' = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message='Sentiment',
-    )
-
-
-class Token(proto.Message):
-    r"""Represents the smallest syntactic building block of the text.
-
-    Attributes:
-        text (google.cloud.language_v1beta2.types.TextSpan):
-            The token text.
-        part_of_speech (google.cloud.language_v1beta2.types.PartOfSpeech):
-            Parts of speech tag for this token.
-        dependency_edge (google.cloud.language_v1beta2.types.DependencyEdge):
-            Dependency tree parse for this token.
-        lemma (str):
-            `Lemma <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__
-            of the token.
-    """
-
-    text: 'TextSpan' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='TextSpan',
-    )
-    part_of_speech: 'PartOfSpeech' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='PartOfSpeech',
-    )
-    dependency_edge: 'DependencyEdge' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='DependencyEdge',
-    )
-    lemma: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class Sentiment(proto.Message):
-    r"""Represents the feeling associated with the entire text or
-    entities in the text.
- Next ID: 6 - - Attributes: - magnitude (float): - A non-negative number in the [0, +inf) range, which - represents the absolute magnitude of sentiment regardless of - score (positive or negative). - score (float): - Sentiment score between -1.0 (negative - sentiment) and 1.0 (positive sentiment). - """ - - magnitude: float = proto.Field( - proto.FLOAT, - number=2, - ) - score: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class PartOfSpeech(proto.Message): - r"""Represents part of speech information for a token. - - Attributes: - tag (google.cloud.language_v1beta2.types.PartOfSpeech.Tag): - The part of speech tag. - aspect (google.cloud.language_v1beta2.types.PartOfSpeech.Aspect): - The grammatical aspect. - case (google.cloud.language_v1beta2.types.PartOfSpeech.Case): - The grammatical case. - form (google.cloud.language_v1beta2.types.PartOfSpeech.Form): - The grammatical form. - gender (google.cloud.language_v1beta2.types.PartOfSpeech.Gender): - The grammatical gender. - mood (google.cloud.language_v1beta2.types.PartOfSpeech.Mood): - The grammatical mood. - number (google.cloud.language_v1beta2.types.PartOfSpeech.Number): - The grammatical number. - person (google.cloud.language_v1beta2.types.PartOfSpeech.Person): - The grammatical person. - proper (google.cloud.language_v1beta2.types.PartOfSpeech.Proper): - The grammatical properness. - reciprocity (google.cloud.language_v1beta2.types.PartOfSpeech.Reciprocity): - The grammatical reciprocity. - tense (google.cloud.language_v1beta2.types.PartOfSpeech.Tense): - The grammatical tense. - voice (google.cloud.language_v1beta2.types.PartOfSpeech.Voice): - The grammatical voice. - """ - class Tag(proto.Enum): - r"""The part of speech tags enum. - - Values: - UNKNOWN (0): - Unknown - ADJ (1): - Adjective - ADP (2): - Adposition (preposition and postposition) - ADV (3): - Adverb - CONJ (4): - Conjunction - DET (5): - Determiner - NOUN (6): - Noun (common and proper) - NUM (7): - Cardinal number - PRON (8): - Pronoun - PRT (9): - Particle or other function word - PUNCT (10): - Punctuation - VERB (11): - Verb (all tenses and modes) - X (12): - Other: foreign words, typos, abbreviations - AFFIX (13): - Affix - """ - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Aspect(proto.Enum): - r"""The characteristic of a verb that expresses time flow during - an event. - - Values: - ASPECT_UNKNOWN (0): - Aspect is not applicable in the analyzed - language or is not predicted. - PERFECTIVE (1): - Perfective - IMPERFECTIVE (2): - Imperfective - PROGRESSIVE (3): - Progressive - """ - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(proto.Enum): - r"""The grammatical function performed by a noun or pronoun in a - phrase, clause, or sentence. In some languages, other parts of - speech, such as adjective and determiner, take case inflection - in agreement with the noun. - - Values: - CASE_UNKNOWN (0): - Case is not applicable in the analyzed - language or is not predicted. 
- ACCUSATIVE (1): - Accusative - ADVERBIAL (2): - Adverbial - COMPLEMENTIVE (3): - Complementive - DATIVE (4): - Dative - GENITIVE (5): - Genitive - INSTRUMENTAL (6): - Instrumental - LOCATIVE (7): - Locative - NOMINATIVE (8): - Nominative - OBLIQUE (9): - Oblique - PARTITIVE (10): - Partitive - PREPOSITIONAL (11): - Prepositional - REFLEXIVE_CASE (12): - Reflexive - RELATIVE_CASE (13): - Relative - VOCATIVE (14): - Vocative - """ - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(proto.Enum): - r"""Depending on the language, Form can be categorizing different - forms of verbs, adjectives, adverbs, etc. For example, - categorizing inflected endings of verbs and adjectives or - distinguishing between short and long forms of adjectives and - participles - - Values: - FORM_UNKNOWN (0): - Form is not applicable in the analyzed - language or is not predicted. - ADNOMIAL (1): - Adnomial - AUXILIARY (2): - Auxiliary - COMPLEMENTIZER (3): - Complementizer - FINAL_ENDING (4): - Final ending - GERUND (5): - Gerund - REALIS (6): - Realis - IRREALIS (7): - Irrealis - SHORT (8): - Short form - LONG (9): - Long form - ORDER (10): - Order form - SPECIFIC (11): - Specific form - """ - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(proto.Enum): - r"""Gender classes of nouns reflected in the behaviour of - associated words. - - Values: - GENDER_UNKNOWN (0): - Gender is not applicable in the analyzed - language or is not predicted. - FEMININE (1): - Feminine - MASCULINE (2): - Masculine - NEUTER (3): - Neuter - """ - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(proto.Enum): - r"""The grammatical feature of verbs, used for showing modality - and attitude. - - Values: - MOOD_UNKNOWN (0): - Mood is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_MOOD (1): - Conditional - IMPERATIVE (2): - Imperative - INDICATIVE (3): - Indicative - INTERROGATIVE (4): - Interrogative - JUSSIVE (5): - Jussive - SUBJUNCTIVE (6): - Subjunctive - """ - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(proto.Enum): - r"""Count distinctions. - - Values: - NUMBER_UNKNOWN (0): - Number is not applicable in the analyzed - language or is not predicted. - SINGULAR (1): - Singular - PLURAL (2): - Plural - DUAL (3): - Dual - """ - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(proto.Enum): - r"""The distinction between the speaker, second person, third - person, etc. - - Values: - PERSON_UNKNOWN (0): - Person is not applicable in the analyzed - language or is not predicted. - FIRST (1): - First - SECOND (2): - Second - THIRD (3): - Third - REFLEXIVE_PERSON (4): - Reflexive - """ - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(proto.Enum): - r"""This category shows if the token is part of a proper name. - - Values: - PROPER_UNKNOWN (0): - Proper is not applicable in the analyzed - language or is not predicted. 
- PROPER (1): - Proper - NOT_PROPER (2): - Not proper - """ - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(proto.Enum): - r"""Reciprocal features of a pronoun. - - Values: - RECIPROCITY_UNKNOWN (0): - Reciprocity is not applicable in the analyzed - language or is not predicted. - RECIPROCAL (1): - Reciprocal - NON_RECIPROCAL (2): - Non-reciprocal - """ - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tense(proto.Enum): - r"""Time reference. - - Values: - TENSE_UNKNOWN (0): - Tense is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_TENSE (1): - Conditional - FUTURE (2): - Future - PAST (3): - Past - PRESENT (4): - Present - IMPERFECT (5): - Imperfect - PLUPERFECT (6): - Pluperfect - """ - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(proto.Enum): - r"""The relationship between the action that a verb expresses and - the participants identified by its arguments. - - Values: - VOICE_UNKNOWN (0): - Voice is not applicable in the analyzed - language or is not predicted. - ACTIVE (1): - Active - CAUSATIVE (2): - Causative - PASSIVE (3): - Passive - """ - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 - - tag: Tag = proto.Field( - proto.ENUM, - number=1, - enum=Tag, - ) - aspect: Aspect = proto.Field( - proto.ENUM, - number=2, - enum=Aspect, - ) - case: Case = proto.Field( - proto.ENUM, - number=3, - enum=Case, - ) - form: Form = proto.Field( - proto.ENUM, - number=4, - enum=Form, - ) - gender: Gender = proto.Field( - proto.ENUM, - number=5, - enum=Gender, - ) - mood: Mood = proto.Field( - proto.ENUM, - number=6, - enum=Mood, - ) - number: Number = proto.Field( - proto.ENUM, - number=7, - enum=Number, - ) - person: Person = proto.Field( - proto.ENUM, - number=8, - enum=Person, - ) - proper: Proper = proto.Field( - proto.ENUM, - number=9, - enum=Proper, - ) - reciprocity: Reciprocity = proto.Field( - proto.ENUM, - number=10, - enum=Reciprocity, - ) - tense: Tense = proto.Field( - proto.ENUM, - number=11, - enum=Tense, - ) - voice: Voice = proto.Field( - proto.ENUM, - number=12, - enum=Voice, - ) - - -class DependencyEdge(proto.Message): - r"""Represents dependency parse tree information for a token. - - Attributes: - head_token_index (int): - Represents the head of this token in the dependency tree. - This is the index of the token which has an arc going to - this token. The index is the position of the token in the - array of tokens returned by the API method. If this token is - a root token, then the ``head_token_index`` is its own - index. - label (google.cloud.language_v1beta2.types.DependencyEdge.Label): - The parse label for the token. - """ - class Label(proto.Enum): - r"""The parse label enum for the token. 
- - Values: - UNKNOWN (0): - Unknown - ABBREV (1): - Abbreviation modifier - ACOMP (2): - Adjectival complement - ADVCL (3): - Adverbial clause modifier - ADVMOD (4): - Adverbial modifier - AMOD (5): - Adjectival modifier of an NP - APPOS (6): - Appositional modifier of an NP - ATTR (7): - Attribute dependent of a copular verb - AUX (8): - Auxiliary (non-main) verb - AUXPASS (9): - Passive auxiliary - CC (10): - Coordinating conjunction - CCOMP (11): - Clausal complement of a verb or adjective - CONJ (12): - Conjunct - CSUBJ (13): - Clausal subject - CSUBJPASS (14): - Clausal passive subject - DEP (15): - Dependency (unable to determine) - DET (16): - Determiner - DISCOURSE (17): - Discourse - DOBJ (18): - Direct object - EXPL (19): - Expletive - GOESWITH (20): - Goes with (part of a word in a text not well - edited) - IOBJ (21): - Indirect object - MARK (22): - Marker (word introducing a subordinate - clause) - MWE (23): - Multi-word expression - MWV (24): - Multi-word verbal expression - NEG (25): - Negation modifier - NN (26): - Noun compound modifier - NPADVMOD (27): - Noun phrase used as an adverbial modifier - NSUBJ (28): - Nominal subject - NSUBJPASS (29): - Passive nominal subject - NUM (30): - Numeric modifier of a noun - NUMBER (31): - Element of compound number - P (32): - Punctuation mark - PARATAXIS (33): - Parataxis relation - PARTMOD (34): - Participial modifier - PCOMP (35): - The complement of a preposition is a clause - POBJ (36): - Object of a preposition - POSS (37): - Possession modifier - POSTNEG (38): - Postverbal negative particle - PRECOMP (39): - Predicate complement - PRECONJ (40): - Preconjunt - PREDET (41): - Predeterminer - PREF (42): - Prefix - PREP (43): - Prepositional modifier - PRONL (44): - The relationship between a verb and verbal - morpheme - PRT (45): - Particle - PS (46): - Associative or possessive marker - QUANTMOD (47): - Quantifier phrase modifier - RCMOD (48): - Relative clause modifier - RCMODREL (49): - Complementizer in relative clause - RDROP (50): - Ellipsis without a preceding predicate - REF (51): - Referent - REMNANT (52): - Remnant - REPARANDUM (53): - Reparandum - ROOT (54): - Root - SNUM (55): - Suffix specifying a unit of number - SUFF (56): - Suffix - TMOD (57): - Temporal modifier - TOPIC (58): - Topic marker - VMOD (59): - Clause headed by an infinite form of the verb - that modifies a noun - VOCATIVE (60): - Vocative - XCOMP (61): - Open clausal complement - SUFFIX (62): - Name suffix - TITLE (63): - Name title - ADVPHMOD (64): - Adverbial phrase modifier - AUXCAUS (65): - Causative auxiliary - AUXVV (66): - Helper auxiliary - DTMOD (67): - Rentaishi (Prenominal modifier) - FOREIGN (68): - Foreign words - KW (69): - Keyword - LIST (70): - List for chains of comparable items - NOMC (71): - Nominalized clause - NOMCSUBJ (72): - Nominalized clausal subject - NOMCSUBJPASS (73): - Nominalized clausal passive - NUMC (74): - Compound of numeric modifier - COP (75): - Copula - DISLOCATED (76): - Dislocated relation (for fronted/topicalized - elements) - ASP (77): - Aspect marker - GMOD (78): - Genitive modifier - GOBJ (79): - Genitive object - INFMOD (80): - Infinitival modifier - MES (81): - Measure - NCOMP (82): - Nominal complement of a noun - """ - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - 
MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - head_token_index: int = proto.Field( - proto.INT32, - number=1, - ) - label: Label = proto.Field( - proto.ENUM, - number=2, - enum=Label, - ) - - -class EntityMention(proto.Message): - r"""Represents a mention for an entity in the text. Currently, - proper noun mentions are supported. - - Attributes: - text (google.cloud.language_v1beta2.types.TextSpan): - The mention text. - type_ (google.cloud.language_v1beta2.types.EntityMention.Type): - The type of the entity mention. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the sentiment - expressed for this mention of the entity in the provided - document. - """ - class Type(proto.Enum): - r"""The supported types of mentions. - - Values: - TYPE_UNKNOWN (0): - Unknown - PROPER (1): - Proper name - COMMON (2): - Common noun (or noun compound) - """ - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=3, - message='Sentiment', - ) - - -class TextSpan(proto.Message): - r"""Represents an output piece of text. - - Attributes: - content (str): - The content of the output text. - begin_offset (int): - The API calculates the beginning offset of the content in - the original document according to the - [EncodingType][google.cloud.language.v1beta2.EncodingType] - specified in the API request. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - begin_offset: int = proto.Field( - proto.INT32, - number=2, - ) - - -class ClassificationCategory(proto.Message): - r"""Represents a category returned from the text classifier. - - Attributes: - name (str): - The name of the category representing the - document. - confidence (float): - The classifier's confidence of the category. - Number represents how certain the classifier is - that this category represents the given text. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - confidence: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class ClassificationModelOptions(proto.Message): - r"""Model options available for classification requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - v1_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V1Model): - Setting this field will use the V1 model and - V1 content categories version. The V1 model is a - legacy model; support for this will be - discontinued in the future. - - This field is a member of `oneof`_ ``model_type``. - v2_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model): - Setting this field will use the V2 model with - the appropriate content categories version. The - V2 model is a better performing model. - - This field is a member of `oneof`_ ``model_type``. - """ - - class V1Model(proto.Message): - r"""Options for the V1 model. - """ - - class V2Model(proto.Message): - r"""Options for the V2 model. - - Attributes: - content_categories_version (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): - The content categories used for - classification. - """ - class ContentCategoriesVersion(proto.Enum): - r"""The content categories used for classification. - - Values: - CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): - If ``ContentCategoriesVersion`` is not specified, this - option will default to ``V1``. - V1 (1): - Legacy content categories of our initial - launch in 2017. - V2 (2): - Updated content categories in 2022. - """ - CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 - V1 = 1 - V2 = 2 - - content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( - proto.ENUM, - number=1, - enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', - ) - - v1_model: V1Model = proto.Field( - proto.MESSAGE, - number=1, - oneof='model_type', - message=V1Model, - ) - v2_model: V2Model = proto.Field( - proto.MESSAGE, - number=2, - oneof='model_type', - message=V2Model, - ) - - -class AnalyzeSentimentRequest(proto.Message): - r"""The sentiment analysis request message. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets for the sentence - sentiment. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSentimentResponse(proto.Message): - r"""The sentiment analysis response message. - - Attributes: - document_sentiment (google.cloud.language_v1beta2.types.Sentiment): - The overall sentiment of the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1beta2.Document.language] - field for more details. - sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): - The sentiment for all the sentences in the - document. - """ - - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=1, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Sentence', - ) - - -class AnalyzeEntitySentimentRequest(proto.Message): - r"""The entity-level sentiment analysis request message. 
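For illustration only, the ``model_type`` oneof above is easiest to see in use. A minimal, hypothetical sketch, assuming the published google-cloud-language package (the option values are illustrative, not prescribed by this file): select the V2 model and hand it to the ``ClassifyTextRequest`` defined later in this hunk.

from google.cloud import language_v1beta2

# Illustrative only: pick the V2 classifier and its 2022 category taxonomy.
# Assigning v2_model leaves the mutually exclusive v1_model unset.
options = language_v1beta2.ClassificationModelOptions(
    v2_model=language_v1beta2.ClassificationModelOptions.V2Model(
        content_categories_version=(
            language_v1beta2.ClassificationModelOptions.V2Model.ContentCategoriesVersion.V2
        ),
    ),
)
request = language_v1beta2.ClassifyTextRequest(
    document=language_v1beta2.Document(content="content_value"),  # placeholder text
    classification_model_options=options,
)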
- - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitySentimentResponse(proto.Message): - r"""The entity-level sentiment analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): - The recognized entities in the input document - with associated sentiments. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1beta2.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeEntitiesRequest(proto.Message): - r"""The entity analysis request message. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitiesResponse(proto.Message): - r"""The entity analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): - The recognized entities in the input - document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1beta2.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeSyntaxRequest(proto.Message): - r"""The syntax analysis request message. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSyntaxResponse(proto.Message): - r"""The syntax analysis response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): - Sentences in the input document. - tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): - Tokens, along with their syntactic - information, in the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. 
See - [Document.language][google.cloud.language.v1beta2.Document.language] - field for more details. - """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - language: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ClassifyTextRequest(proto.Message): - r"""The document classification request message. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): - Model options to use for classification. - Defaults to v1 options if not specified. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='ClassificationModelOptions', - ) - - -class ClassifyTextResponse(proto.Message): - r"""The document classification response message. - - Attributes: - categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): - Categories representing the input document. - """ - - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClassificationCategory', - ) - - -class ModerateTextRequest(proto.Message): - r"""The document moderation request message. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - - -class ModerateTextResponse(proto.Message): - r"""The document moderation response message. - - Attributes: - moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): - Harmful and sensitive categories representing - the input document. - """ - - moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClassificationCategory', - ) - - -class AnnotateTextRequest(proto.Message): - r"""The request message for the text annotation API, which can - perform multiple analysis types (sentiment, entities, and - syntax) in one call. - - Attributes: - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): - Required. The enabled features. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - class Features(proto.Message): - r"""All available features for sentiment, syntax, and semantic - analysis. Setting each one to true will enable that specific - analysis for the input. Next ID: 12 - - Attributes: - extract_syntax (bool): - Extract syntax information. - extract_entities (bool): - Extract entities. - extract_document_sentiment (bool): - Extract document-level sentiment. - extract_entity_sentiment (bool): - Extract entities and their associated - sentiment. - classify_text (bool): - Classify the full document into categories. If this is true, - the API will use the default model which classifies into a - `predefined - taxonomy <https://cloud.google.com/natural-language/docs/categories>`__. - moderate_text (bool): - Moderate the document for harmful and - sensitive categories.
- classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): - The model options to use for classification. Defaults to v1 - options if not specified. Only used if ``classify_text`` is - set to true. - """ - - extract_syntax: bool = proto.Field( - proto.BOOL, - number=1, - ) - extract_entities: bool = proto.Field( - proto.BOOL, - number=2, - ) - extract_document_sentiment: bool = proto.Field( - proto.BOOL, - number=3, - ) - extract_entity_sentiment: bool = proto.Field( - proto.BOOL, - number=4, - ) - classify_text: bool = proto.Field( - proto.BOOL, - number=6, - ) - moderate_text: bool = proto.Field( - proto.BOOL, - number=11, - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=10, - message='ClassificationModelOptions', - ) - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - features: Features = proto.Field( - proto.MESSAGE, - number=2, - message=Features, - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=3, - enum='EncodingType', - ) - - -class AnnotateTextResponse(proto.Message): - r"""The text annotations response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): - Sentences in the input document. Populated if the user - enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. - tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): - Tokens, along with their syntactic information, in the input - document. Populated if the user enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. - entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): - Entities, along with their semantic information, in the - input document. Populated if the user enables - [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities]. - document_sentiment (google.cloud.language_v1beta2.types.Sentiment): - The overall sentiment for the document. Populated if the - user enables - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment]. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1beta2.Document.language] - field for more details. - categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): - Categories identified in the input document. - moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): - Harmful and sensitive categories identified - in the input document. 
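A hedged usage sketch of the moderation surface defined above, assuming the published google-cloud-language client (the document content is a placeholder): ``ModerateTextRequest`` can be sent on its own, or moderation can ride along with other analyses by setting ``Features.moderate_text``.

from google.cloud import language_v1beta2

client = language_v1beta2.LanguageServiceClient()
document = language_v1beta2.Document(content="content_value")  # placeholder text

# Standalone moderation call.
response = client.moderate_text(
    request=language_v1beta2.ModerateTextRequest(document=document)
)
for category in response.moderation_categories:
    print(category.name, category.confidence)

# The same signal via annotate_text, using the Features flag (field 11 above).
features = language_v1beta2.AnnotateTextRequest.Features(moderate_text=True)
annotated = client.annotate_text(
    request=language_v1beta2.AnnotateTextRequest(document=document, features=features)
)
print(annotated.moderation_categories)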
- """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Entity', - ) - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=4, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=5, - ) - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ClassificationCategory', - ) - moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='ClassificationCategory', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta2/mypy.ini b/owl-bot-staging/v1beta2/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1beta2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1beta2/noxfile.py b/owl-bot-staging/v1beta2/noxfile.py deleted file mode 100644 index 95cd6c8b..00000000 --- a/owl-bot-staging/v1beta2/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/language_v1beta2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py deleted file mode 100644 index ef2d4a6d..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_analyze_entities(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py deleted file mode 100644 index b8c2694b..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_analyze_entities(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py deleted file mode 100644 index 818d4209..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py deleted file mode 100644 index cabc3ff5..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py deleted file mode 100644 index b60e606a..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_analyze_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py deleted file mode 100644 index df735913..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_analyze_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py deleted file mode 100644 index e42a0728..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_analyze_syntax(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = await client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py deleted file mode 100644 index f9ed77cc..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_analyze_syntax(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py deleted file mode 100644 index 5b17e2b1..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnnotateText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_annotate_text(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = await client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py deleted file mode 100644 index 701c94e5..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_AnnotateText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_annotate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py deleted file mode 100644 index 94b5ebcc..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_ClassifyText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -async def sample_classify_text(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = await client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py deleted file mode 100644 index f9415093..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1beta2_generated_LanguageService_ClassifyText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_classify_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json deleted file mode 100644 index fb6633f2..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json +++ /dev/null @@ -1,1190 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.language.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-language", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1beta2_generated_language_service_analyze_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entities", - "method": { - "fullName": 
"google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1beta2_generated_language_service_analyze_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1beta2_generated_language_service_analyze_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1beta2_generated_language_service_analyze_syntax_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_syntax_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1beta2_generated_language_service_analyze_syntax_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_syntax_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1beta2_generated_language_service_annotate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_annotate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1beta2_generated_language_service_annotate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_annotate_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": 
"google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1beta2_generated_language_service_classify_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_classify_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1beta2_generated_language_service_classify_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_classify_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" - }, - { - "name": "document", - "type": 
"google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1beta2_generated_language_service_moderate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_moderate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1beta2_generated_language_service_moderate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_moderate_text_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py deleted file mode 100644 index 10fa218c..00000000 --- a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class languageCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_entities': ('document', 'encoding_type', ), - 'analyze_entity_sentiment': ('document', 'encoding_type', ), - 'analyze_sentiment': ('document', 'encoding_type', ), - 'analyze_syntax': ('document', 'encoding_type', ), - 'annotate_text': ('document', 'features', 'encoding_type', ), - 'classify_text': ('document', 'classification_model_options', ), - 'moderate_text': ('document', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=languageCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
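# A minimal sketch of the rewrite that languageCallTransformer above performs on
# each parsed module (method and parameter names come from METHOD_TO_PARAMS; the
# literals are placeholders):
#
#     client.analyze_sentiment(document, encoding_type, timeout=30.0)
#
# is rewritten to
#
#     client.analyze_sentiment(
#         request={'document': document, 'encoding_type': encoding_type},
#         timeout=30.0,
#     )
#
# Control parameters (retry, timeout, metadata) remain keyword arguments, and
# calls that already pass request= are returned unchanged.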
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the language client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta2/setup.py b/owl-bot-staging/v1beta2/setup.py deleted file mode 100644 index 047e5bce..00000000 --- a/owl-bot-staging/v1beta2/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-language' - - -description = "Google Cloud Language API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-language" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/tests/__init__.py b/owl-bot-staging/v1beta2/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/__init__.py b/owl-bot-staging/v1beta2/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py deleted file mode 100644 index 3e0b7671..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py +++ /dev/null @@ -1,4070 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.language_v1beta2.services.language_service import LanguageServiceAsyncClient -from google.cloud.language_v1beta2.services.language_service import LanguageServiceClient -from google.cloud.language_v1beta2.services.language_service import transports -from google.cloud.language_v1beta2.types import language_service -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LanguageServiceClient._get_default_mtls_endpoint(None) is None - assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - 
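# A usage sketch of the from_service_account_info factory exercised by the test
# above (assumes a service-account key parsed from JSON; the path is a
# placeholder):
#
#     import json
#     from google.cloud import language_v1beta2
#
#     with open("path/to/service-account.json") as f:
#         info = json.load(f)
#     client = language_v1beta2.LanguageServiceClient.from_service_account_info(info)
#
# Per the assertions above, gRPC transports target language.googleapis.com:443,
# while the REST transport targets https://language.googleapis.com.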
-@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LanguageServiceGrpcTransport, "grpc"), - (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - -def test_language_service_client_get_transport_class(): - transport = LanguageServiceClient.get_transport_class() - available_transports = [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceRestTransport, - ] - assert transport in available_transports - - transport = LanguageServiceClient.get_transport_class("grpc") - assert transport == transports.LanguageServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
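# Taken together, the cases above and below sketch the endpoint selection rule
# under test: GOOGLE_API_USE_MTLS_ENDPOINT="never" pins DEFAULT_ENDPOINT,
# "always" pins DEFAULT_MTLS_ENDPOINT, "auto" picks the mTLS endpoint only when
# a client certificate is configured, and any other value raises
# MutualTLSChannelError. For example:
#
#     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
#     client = LanguageServiceClient()   # host == DEFAULT_MTLS_ENDPOINT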
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - LanguageServiceClient, LanguageServiceAsyncClient -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), -]) -def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_language_service_client_client_options_from_dict(): - with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = LanguageServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object(
- google.auth, "load_credentials_from_file", autospec=True
- ) as load_creds, mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel"
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- file_creds = ga_credentials.AnonymousCredentials()
- load_creds.return_value = (file_creds, None)
- adc.return_value = (creds, None)
- client = client_class(client_options=options, transport=transport_name)
- create_channel.assert_called_with(
- "language.googleapis.com:443",
- credentials=file_creds,
- credentials_file=None,
- quota_project_id=None,
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-language',
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- scopes=None,
- default_host="language.googleapis.com",
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.AnalyzeSentimentRequest,
- dict,
-])
-def test_analyze_sentiment(request_type, transport: str = 'grpc'):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeSentimentResponse(
- language='language_value',
- )
- response = client.analyze_sentiment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSentimentRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSentimentResponse)
- assert response.language == 'language_value'
-
-
-def test_analyze_sentiment_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_sentiment),
- '__call__') as call:
- client.analyze_sentiment()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSentimentRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest):
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
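- # The return value is wrapped in grpc_helpers_async.FakeUnaryUnaryCall so that
- # the mocked stub behaves like a grpc.aio call object: awaiting it yields the
- # wrapped response.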
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse(
- language='language_value',
- ))
- response = await client.analyze_sentiment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSentimentRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSentimentResponse)
- assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async_from_dict():
- await test_analyze_sentiment_async(request_type=dict)
-
-
-def test_analyze_sentiment_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeSentimentResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.analyze_sentiment(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-
-def test_analyze_sentiment_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.analyze_sentiment(
- language_service.AnalyzeSentimentRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.analyze_sentiment(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
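- # (The flattened document/encoding_type kwargs are merged by the client into a
- # single request message, hence the attribute reads off args[0] below.)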
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_sentiment_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - response = client.analyze_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - client.analyze_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - -@pytest.mark.asyncio -async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse(
- language='language_value',
- ))
- response = await client.analyze_entities(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeEntitiesRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeEntitiesResponse)
- assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entities_async_from_dict():
- await test_analyze_entities_async(request_type=dict)
-
-
-def test_analyze_entities_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_entities),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeEntitiesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.analyze_entities(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-
-def test_analyze_entities_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.analyze_entities(
- language_service.AnalyzeEntitiesRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-@pytest.mark.asyncio
-async def test_analyze_entities_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_entities),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.analyze_entities(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_entities_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - response = client.analyze_entity_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - client.analyze_entity_sentiment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
- language='language_value',
- ))
- response = await client.analyze_entity_sentiment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeEntitySentimentRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
- assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_async_from_dict():
- await test_analyze_entity_sentiment_async(request_type=dict)
-
-
-def test_analyze_entity_sentiment_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_entity_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeEntitySentimentResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.analyze_entity_sentiment(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-
-def test_analyze_entity_sentiment_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.analyze_entity_sentiment(
- language_service.AnalyzeEntitySentimentRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_entity_sentiment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.analyze_entity_sentiment(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_error_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.analyze_entity_sentiment(
- language_service.AnalyzeEntitySentimentRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.AnalyzeSyntaxRequest,
- dict,
-])
-def test_analyze_syntax(request_type, transport: str = 'grpc'):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_syntax),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeSyntaxResponse(
- language='language_value',
- )
- response = client.analyze_syntax(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSyntaxRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSyntaxResponse)
- assert response.language == 'language_value'
-
-
-def test_analyze_syntax_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_syntax),
- '__call__') as call:
- client.analyze_syntax()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest):
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_syntax),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse(
- language='language_value',
- ))
- response = await client.analyze_syntax(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnalyzeSyntaxRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSyntaxResponse)
- assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async_from_dict():
- await test_analyze_syntax_async(request_type=dict)
-
-
-def test_analyze_syntax_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_syntax),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnalyzeSyntaxResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.analyze_syntax(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-
-def test_analyze_syntax_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.analyze_syntax(
- language_service.AnalyzeSyntaxRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_syntax),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.analyze_syntax(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_error_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.analyze_syntax(
- language_service.AnalyzeSyntaxRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.ClassifyTextRequest,
- dict,
-])
-def test_classify_text(request_type, transport: str = 'grpc'):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.classify_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.ClassifyTextResponse()
- response = client.classify_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ClassifyTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-def test_classify_text_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.classify_text),
- '__call__') as call:
- client.classify_text()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ClassifyTextRequest()
-
-@pytest.mark.asyncio
-async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.classify_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
- response = await client.classify_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ClassifyTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_classify_text_async_from_dict():
- await test_classify_text_async(request_type=dict)
-
-
-def test_classify_text_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.classify_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.ClassifyTextResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.classify_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
-
-
-def test_classify_text_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.classify_text(
- language_service.ClassifyTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.classify_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.classify_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_error_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.classify_text(
- language_service.ClassifyTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.ModerateTextRequest,
- dict,
-])
-def test_moderate_text(request_type, transport: str = 'grpc'):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.moderate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.ModerateTextResponse()
- response = client.moderate_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ModerateTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.ModerateTextResponse)
-
-
-def test_moderate_text_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.moderate_text),
- '__call__') as call:
- client.moderate_text()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ModerateTextRequest()
-
-@pytest.mark.asyncio
-async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest):
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.moderate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
- response = await client.moderate_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.ModerateTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.ModerateTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_moderate_text_async_from_dict():
- await test_moderate_text_async(request_type=dict)
-
-
-def test_moderate_text_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.moderate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.ModerateTextResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.moderate_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
-
-
-def test_moderate_text_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
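- # The request object and the flattened fields are two mutually exclusive ways
- # of populating the same message, so the client rejects the combination with a
- # ValueError before any RPC is attempted.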
- with pytest.raises(ValueError):
- client.moderate_text(
- language_service.ModerateTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.moderate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.moderate_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_error_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.moderate_text(
- language_service.ModerateTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.AnnotateTextRequest,
- dict,
-])
-def test_annotate_text(request_type, transport: str = 'grpc'):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.annotate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnnotateTextResponse(
- language='language_value',
- )
- response = client.annotate_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnnotateTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnnotateTextResponse)
- assert response.language == 'language_value'
-
-
-def test_annotate_text_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.annotate_text),
- '__call__') as call:
- client.annotate_text()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnnotateTextRequest()
-
-@pytest.mark.asyncio
-async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.annotate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
- language='language_value',
- ))
- response = await client.annotate_text(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == language_service.AnnotateTextRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnnotateTextResponse)
- assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_annotate_text_async_from_dict():
- await test_annotate_text_async(request_type=dict)
-
-
-def test_annotate_text_flattened():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.annotate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = language_service.AnnotateTextResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.annotate_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].features
- mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-
-def test_annotate_text_flattened_error():
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.annotate_text(
- language_service.AnnotateTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.annotate_text),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.annotate_text(
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].document
- mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
- assert arg == mock_val
- arg = args[0].features
- mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
- assert arg == mock_val
- arg = args[0].encoding_type
- mock_val = language_service.EncodingType.UTF8
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_error_async():
- client = LanguageServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.annotate_text(
- language_service.AnnotateTextRequest(),
- document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
- features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
- encoding_type=language_service.EncodingType.UTF8,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- language_service.AnalyzeSentimentRequest,
- dict,
-])
-def test_analyze_sentiment_rest(request_type):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = language_service.AnalyzeSentimentResponse(
- language='language_value',
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- response = client.analyze_sentiment(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
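- # transcode() normally maps a request onto the matching HTTP rule as a dict of
- # the form {'uri': ..., 'method': ..., 'query_params': ..., 'body': ...}; the
- # stubbed transcode_result below mimics that shape with a placeholder uri.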
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.analyze_sentiment(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_analyze_sentiment_rest_unset_required_fields():
- transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_sentiment_rest_interceptors(null_interceptor):
- transport = transports.LanguageServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
- )
- client = LanguageServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
- mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
-
- request = language_service.AnalyzeSentimentRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = language_service.AnalyzeSentimentResponse()
-
- client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
- client = LanguageServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
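- # The REST transport translates HTTP error statuses into google.api_core
- # exceptions (a 400 surfaces as core_exceptions.BadRequest), which is what
- # pytest.raises asserts on below.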
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_sentiment(request)
-
-
-def test_analyze_sentiment_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSentimentResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_sentiment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:analyzeSentiment" % client.transport._host, args[1])
-
-
-def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_sentiment(
-            language_service.AnalyzeSentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_sentiment_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeEntitiesRequest,
-    dict,
-])
-def test_analyze_entities_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitiesResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_entities(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeEntitiesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entities(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entities_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.analyze_entities._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entities_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
-
-        request = language_service.AnalyzeEntitiesRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitiesResponse()
-
-        client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_entities(request)
-
-
-def test_analyze_entities_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitiesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_entities(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:analyzeEntities" % client.transport._host, args[1])
-
-
-def test_analyze_entities_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entities(
-            language_service.AnalyzeEntitiesRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_entities_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeEntitySentimentRequest,
-    dict,
-])
-def test_analyze_entity_sentiment_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitySentimentResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_entity_sentiment(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeEntitySentimentResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entity_sentiment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entity_sentiment_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
-
-        request = language_service.AnalyzeEntitySentimentRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitySentimentResponse()
-
-        client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_entity_sentiment(request)
-
-
-def test_analyze_entity_sentiment_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitySentimentResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_entity_sentiment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:analyzeEntitySentiment" % client.transport._host, args[1])
-
-
-def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_entity_sentiment_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeSyntaxRequest,
-    dict,
-])
-def test_analyze_syntax_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_syntax(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeSyntaxResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_syntax(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_syntax_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.analyze_syntax._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_syntax_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
-
-        request = language_service.AnalyzeSyntaxRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeSyntaxResponse()
-
-        client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_syntax(request)
-
-
-def test_analyze_syntax_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSyntaxResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_syntax(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:analyzeSyntax" % client.transport._host, args[1])
-
-
-def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_syntax_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.ClassifyTextRequest,
-    dict,
-])
-def test_classify_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ClassifyTextResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.classify_text(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.ClassifyTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.classify_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_classify_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.classify_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_classify_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
-
-        request = language_service.ClassifyTextRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.ClassifyTextResponse()
-
-        client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.classify_text(request)
-
-
-def test_classify_text_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ClassifyTextResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.classify_text(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:classifyText" % client.transport._host, args[1])
-
-
-def test_classify_text_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-def test_classify_text_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.ModerateTextRequest,
-    dict,
-])
-def test_moderate_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ModerateTextResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.moderate_text(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.ModerateTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.moderate_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_moderate_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.moderate_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_moderate_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse())
-
-        request = language_service.ModerateTextRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.ModerateTextResponse()
-
-        client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.moderate_text(request)
-
-
-def test_moderate_text_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ModerateTextResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.moderate_text(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:moderateText" % client.transport._host, args[1])
-
-
-def test_moderate_text_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-def test_moderate_text_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnnotateTextRequest,
-    dict,
-])
-def test_annotate_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnnotateTextResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.annotate_text(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnnotateTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.annotate_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_annotate_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.annotate_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", "features", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_annotate_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
-
-        request = language_service.AnnotateTextRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnnotateTextResponse()
-
-        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.annotate_text(request)
-
-
-def test_annotate_text_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnnotateTextResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.annotate_text(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta2/documents:annotateText" % client.transport._host, args[1])
-
-
-def test_annotate_text_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.annotate_text(
-            language_service.AnnotateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_annotate_text_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = LanguageServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = LanguageServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = LanguageServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = mock.Mock()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = LanguageServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = LanguageServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = LanguageServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.LanguageServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.LanguageServiceGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.LanguageServiceGrpcTransport,
-    transports.LanguageServiceGrpcAsyncIOTransport,
-    transports.LanguageServiceRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "rest",
-])
-def test_transport_kind(transport_name):
-    transport = LanguageServiceClient.get_transport_class(transport_name)(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert transport.kind == transport_name
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.LanguageServiceGrpcTransport,
-    )
-
-def test_language_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.LanguageServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_language_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.LanguageServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
- methods = ( - 'analyze_sentiment', - 'analyze_entities', - 'analyze_entity_sentiment', - 'analyze_syntax', - 'classify_text', - 'moderate_text', - 'annotate_text', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_language_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport() - adc.assert_called_once() - - -def test_language_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LanguageServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_language_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, - ], -) -def test_language_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LanguageServiceGrpcTransport, grpc_helpers), - (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_language_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "language.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="language.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_language_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LanguageServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_no_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_with_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_language_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LanguageServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LanguageServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.analyze_sentiment._session - session2 = client2.transport.analyze_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_entities._session - session2 = client2.transport.analyze_entities._session - assert session1 != session2 - session1 = client1.transport.analyze_entity_sentiment._session - session2 = client2.transport.analyze_entity_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_syntax._session - session2 = client2.transport.analyze_syntax._session - assert session1 != session2 - session1 = client1.transport.classify_text._session - session2 = client2.transport.classify_text._session - assert session1 != session2 - session1 = client1.transport.moderate_text._session - session2 = client2.transport.moderate_text._session - assert session1 != session2 - session1 = client1.transport.annotate_text._session - session2 = client2.transport.annotate_text._session - assert session1 != session2 -def test_language_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
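- # (a pre-built channel passed to the constructor is adopted as-is rather than a new one being created)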
- transport = transports.LanguageServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_language_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LanguageServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LanguageServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = LanguageServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = LanguageServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = LanguageServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LanguageServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = LanguageServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = LanguageServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = LanguageServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
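- # (e.g. common_project_path("mussel") yields "projects/mussel", which the parse helper below inverts)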
- actual = LanguageServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LanguageServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = LanguageServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = LanguageServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
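- # (exiting the client's context manager is expected to close the underlying transport)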
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py b/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py similarity index 100% rename from owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py rename to samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py b/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py similarity index 100% rename from owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py rename to samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py diff --git a/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json index 9651e696..936a8b70 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.9.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json index 6ef547cc..fb6633f2 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.9.1" + "version": "0.1.0" }, "snippets": [ { @@ -1024,6 +1024,167 @@ } ], "title": "language_v1beta2_generated_language_service_classify_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.moderate_text", + "method": { + "fullName": 
"google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_sync.py" } ] } diff --git a/scripts/fixup_language_v1beta2_keywords.py b/scripts/fixup_language_v1beta2_keywords.py index fc15df57..10fa218c 100644 --- a/scripts/fixup_language_v1beta2_keywords.py +++ b/scripts/fixup_language_v1beta2_keywords.py @@ -45,6 +45,7 @@ class languageCallTransformer(cst.CSTTransformer): 'analyze_syntax': ('document', 
'encoding_type', ), 'annotate_text': ('document', 'features', 'encoding_type', ), 'classify_text': ('document', 'classification_model_options', ), + 'moderate_text': ('document', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py b/tests/unit/gapic/language_v1beta2/test_language_service.py index 809ce0e0..adfc2301 100644 --- a/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -1700,6 +1700,183 @@ async def test_classify_text_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + language_service.ModerateTextRequest, + dict, + ], +) +def test_moderate_text(request_type, transport: str = "grpc"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ModerateTextResponse() + response = client.moderate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + client.moderate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + + +@pytest.mark.asyncio +async def test_moderate_text_async( + transport: str = "grpc_asyncio", request_type=language_service.ModerateTextRequest +): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.ModerateTextResponse() + ) + response = await client.moderate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.ModerateTextRequest() + + # Establish that the response is the type that we expect. 
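+ # (FakeUnaryUnaryCall wraps the response in an awaitable, mirroring what a real async gRPC stub returns)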
+ assert isinstance(response, language_service.ModerateTextResponse) + + +@pytest.mark.asyncio +async def test_moderate_text_async_from_dict(): + await test_moderate_text_async(request_type=dict) + + +def test_moderate_text_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ModerateTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.moderate_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + assert arg == mock_val + + +def test_moderate_text_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +@pytest.mark.asyncio +async def test_moderate_text_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ModerateTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.ModerateTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.moderate_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_moderate_text_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
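+ # (the client raises ValueError up front rather than guessing which of the two inputs should win)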
+ with pytest.raises(ValueError): + await client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3247,6 +3424,269 @@ def test_classify_text_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + language_service.ModerateTextRequest, + dict, + ], +) +def test_moderate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.moderate_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_rest_required_fields( + request_type=language_service.ModerateTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
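+ # (transcode is stubbed to return a fixed uri/method/body/query_params mapping, keeping the test deterministic)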
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.moderate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_moderate_text_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.moderate_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_moderate_text_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_moderate_text" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_moderate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.ModerateTextRequest.pb( + language_service.ModerateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.ModerateTextResponse.to_json( + language_service.ModerateTextResponse() + ) + + request = language_service.ModerateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.ModerateTextResponse() + + client.moderate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_moderate_text_rest_bad_request( + transport: str = "rest", request_type=language_service.ModerateTextRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
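+ # (google.api_core surfaces an HTTP 400 response as core_exceptions.BadRequest)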
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.moderate_text(request) + + +def test_moderate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.moderate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:moderateText" % client.transport._host, args[1] + ) + + +def test_moderate_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +def test_moderate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3669,6 +4109,7 @@ def test_language_service_base_transport(): "analyze_entity_sentiment", "analyze_syntax", "classify_text", + "moderate_text", "annotate_text", ) for method in methods: @@ -3955,6 +4396,9 @@ def test_language_service_client_transport_session_collision(transport_name): session1 = client1.transport.classify_text._session session2 = client2.transport.classify_text._session assert session1 != session2 + session1 = client1.transport.moderate_text._session + session2 = client2.transport.moderate_text._session + assert session1 != session2 session1 = client1.transport.annotate_text._session session2 = client2.transport.annotate_text._session assert session1 != session2 From 3e12fa47780342e4f56fd5a44c79294b2069a81c Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 25 May 2023 00:49:58 +0000 Subject: [PATCH 3/4] feat: Added client library support for ModerateText in the Natural Language API (V1) PiperOrigin-RevId: 534992604 Source-Link: https://github.com/googleapis/googleapis/commit/2400facb5e4d93de884e58d38656b707df43fffa Source-Link: https://github.com/googleapis/googleapis-gen/commit/32caa580131b05b880fa3b802ca0c21179881733 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzJjYWE1ODAxMzFiMDViODgwZmEzYjgwMmNhMGMyMTE3OTg4MTczMyJ9 --- owl-bot-staging/v1/.coveragerc | 13 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + owl-bot-staging/v1/docs/conf.py | 376 ++ owl-bot-staging/v1/docs/index.rst | 7 + .../v1/docs/language_v1/language_service.rst | 6 + .../v1/docs/language_v1/services.rst | 6 + owl-bot-staging/v1/docs/language_v1/types.rst | 6 + .../v1/google/cloud/language/__init__.py | 79 + .../v1/google/cloud/language/gapic_version.py | 16 + .../v1/google/cloud/language/py.typed | 2 + .../v1/google/cloud/language_v1/__init__.py | 80 + .../cloud/language_v1/gapic_metadata.json | 133 + .../google/cloud/language_v1/gapic_version.py | 16 + .../v1/google/cloud/language_v1/py.typed | 2 + .../cloud/language_v1/services/__init__.py | 15 + .../services/language_service/__init__.py | 22 + .../services/language_service/async_client.py | 962 ++++ .../services/language_service/client.py | 1115 +++++ .../language_service/transports/__init__.py | 38 + .../language_service/transports/base.py | 275 ++ .../language_service/transports/grpc.py | 432 ++ .../transports/grpc_asyncio.py | 431 ++ .../language_service/transports/rest.py | 1029 +++++ .../cloud/language_v1/types/__init__.py | 72 + .../language_v1/types/language_service.py | 1724 +++++++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 + ...language_service_analyze_entities_async.py | 55 + ..._language_service_analyze_entities_sync.py | 55 + ..._service_analyze_entity_sentiment_async.py | 55 + ...e_service_analyze_entity_sentiment_sync.py | 55 + ...anguage_service_analyze_sentiment_async.py | 55 + ...language_service_analyze_sentiment_sync.py | 55 + ...d_language_service_analyze_syntax_async.py | 55 + ...ed_language_service_analyze_syntax_sync.py | 55 + ...ed_language_service_annotate_text_async.py | 55 + ...ted_language_service_annotate_text_sync.py | 55 + 
...ed_language_service_classify_text_async.py | 55 + ...ted_language_service_classify_text_sync.py | 55 + ...ed_language_service_moderate_text_async.py | 55 + ...ted_language_service_moderate_text_sync.py | 55 + ...pet_metadata_google.cloud.language.v1.json | 1190 +++++ .../v1/scripts/fixup_language_v1_keywords.py | 182 + owl-bot-staging/v1/setup.py | 90 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 9 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/language_v1/__init__.py | 16 + .../language_v1/test_language_service.py | 4070 +++++++++++++++++ owl-bot-staging/v1beta2/.coveragerc | 13 + owl-bot-staging/v1beta2/.flake8 | 33 + owl-bot-staging/v1beta2/MANIFEST.in | 2 + owl-bot-staging/v1beta2/README.rst | 49 + owl-bot-staging/v1beta2/docs/conf.py | 376 ++ owl-bot-staging/v1beta2/docs/index.rst | 7 + .../language_v1beta2/language_service.rst | 6 + .../docs/language_v1beta2/services.rst | 6 + .../v1beta2/docs/language_v1beta2/types.rst | 6 + .../v1beta2/google/cloud/language/__init__.py | 79 + .../google/cloud/language/gapic_version.py | 16 + .../v1beta2/google/cloud/language/py.typed | 2 + .../google/cloud/language_v1beta2/__init__.py | 80 + .../language_v1beta2/gapic_metadata.json | 133 + .../cloud/language_v1beta2/gapic_version.py | 16 + .../google/cloud/language_v1beta2/py.typed | 2 + .../language_v1beta2/services/__init__.py | 15 + .../services/language_service/__init__.py | 22 + .../services/language_service/async_client.py | 963 ++++ .../services/language_service/client.py | 1116 +++++ .../language_service/transports/__init__.py | 38 + .../language_service/transports/base.py | 275 ++ .../language_service/transports/grpc.py | 432 ++ .../transports/grpc_asyncio.py | 431 ++ .../language_service/transports/rest.py | 1029 +++++ .../cloud/language_v1beta2/types/__init__.py | 72 + .../types/language_service.py | 1761 +++++++ owl-bot-staging/v1beta2/mypy.ini | 3 + owl-bot-staging/v1beta2/noxfile.py | 184 + ...language_service_analyze_entities_async.py | 55 + ..._language_service_analyze_entities_sync.py | 55 + ..._service_analyze_entity_sentiment_async.py | 55 + ...e_service_analyze_entity_sentiment_sync.py | 55 + ...anguage_service_analyze_sentiment_async.py | 55 + ...language_service_analyze_sentiment_sync.py | 55 + ...d_language_service_analyze_syntax_async.py | 55 + ...ed_language_service_analyze_syntax_sync.py | 55 + ...ed_language_service_annotate_text_async.py | 55 + ...ted_language_service_annotate_text_sync.py | 55 + ...ed_language_service_classify_text_async.py | 55 + ...ted_language_service_classify_text_sync.py | 55 + ...ed_language_service_moderate_text_async.py | 55 + ...ted_language_service_moderate_text_sync.py | 55 + ...etadata_google.cloud.language.v1beta2.json | 1190 +++++ .../fixup_language_v1beta2_keywords.py | 182 + owl-bot-staging/v1beta2/setup.py | 90 + .../v1beta2/testing/constraints-3.10.txt | 6 + .../v1beta2/testing/constraints-3.11.txt | 6 + .../v1beta2/testing/constraints-3.12.txt | 6 + .../v1beta2/testing/constraints-3.7.txt | 9 + .../v1beta2/testing/constraints-3.8.txt | 6 + .../v1beta2/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1beta2/tests/__init__.py | 16 + .../v1beta2/tests/unit/__init__.py | 16 + .../v1beta2/tests/unit/gapic/__init__.py | 16 + 
.../unit/gapic/language_v1beta2/__init__.py | 16 + .../language_v1beta2/test_language_service.py | 4070 +++++++++++++++++ 114 files changed, 27105 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/language_service.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/language_v1/types.rst create mode 100644 owl-bot-staging/v1/google/cloud/language/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/language/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json create mode 100644 owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py create mode 100644 owl-bot-staging/v1beta2/.coveragerc create mode 100644 owl-bot-staging/v1beta2/.flake8 create mode 100644 owl-bot-staging/v1beta2/MANIFEST.in create mode 100644 owl-bot-staging/v1beta2/README.rst create mode 100644 owl-bot-staging/v1beta2/docs/conf.py create mode 100644 owl-bot-staging/v1beta2/docs/index.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst create mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language/py.typed create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py create mode 
100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py create mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py create mode 100644 owl-bot-staging/v1beta2/mypy.ini create mode 100644 owl-bot-staging/v1beta2/noxfile.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py create mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json create mode 100644 owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py create mode 100644 owl-bot-staging/v1beta2/setup.py create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1beta2/tests/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py create mode 100644 
 create mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py

diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc
new file mode 100644
index 00000000..c1f51536
--- /dev/null
+++ b/owl-bot-staging/v1/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+  google/cloud/language/__init__.py
+  google/cloud/language/gapic_version.py
+exclude_lines =
+  # Re-enable the standard pragma
+  pragma: NO COVER
+  # Ignore debug-only repr
+  def __repr__
diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8
new file mode 100644
index 00000000..29227d4c
--- /dev/null
+++ b/owl-bot-staging/v1/.flake8
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[flake8]
+ignore = E203, E266, E501, W503
+exclude =
+  # Exclude generated code.
+  **/proto/**
+  **/gapic/**
+  **/services/**
+  **/types/**
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in
new file mode 100644
index 00000000..e0f21a43
--- /dev/null
+++ b/owl-bot-staging/v1/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include google/cloud/language *.py
+recursive-include google/cloud/language_v1 *.py
diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst
new file mode 100644
index 00000000..0c5f1b6b
--- /dev/null
+++ b/owl-bot-staging/v1/README.rst
@@ -0,0 +1,49 @@
+Python Client for Google Cloud Language API
+=================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Language API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py
new file mode 100644
index 00000000..2e1b322d
--- /dev/null
+++ b/owl-bot-staging/v1/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-language documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.

+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "4.0.1"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-language"
+copyright = u"2022, Google, LLC"
+author = u"Google APIs"  # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
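
Once the library is installed as described in the README above, a minimal smoke test is a single sentiment call. A sketch only, assuming Application Default Credentials are configured in the environment:

.. code-block:: python

    from google.cloud import language_v1

    # Assumes Application Default Credentials; the document content is
    # illustrative.
    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="Hello, world!",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )
    response = client.analyze_sentiment(request={"document": document})
    print(response.document_sentiment.score)
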
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-language-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-language.tex", + u"google-cloud-language Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. 
+# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-language", + u"Google Cloud Language Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-language", + u"google-cloud-language Documentation", + author, + "google-cloud-language", + "GAPIC library for Google Cloud Language API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 00000000..90928956 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1/services + language_v1/types diff --git a/owl-bot-staging/v1/docs/language_v1/language_service.rst b/owl-bot-staging/v1/docs/language_v1/language_service.rst new file mode 100644 index 00000000..96e8755a --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/language_service.rst @@ -0,0 +1,6 @@ +LanguageService +--------------------------------- + +.. automodule:: google.cloud.language_v1.services.language_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/language_v1/services.rst b/owl-bot-staging/v1/docs/language_v1/services.rst new file mode 100644 index 00000000..26f74fe9 --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Language v1 API +========================================= +.. 
toctree:: + :maxdepth: 2 + + language_service diff --git a/owl-bot-staging/v1/docs/language_v1/types.rst b/owl-bot-staging/v1/docs/language_v1/types.rst new file mode 100644 index 00000000..5dd3769e --- /dev/null +++ b/owl-bot-staging/v1/docs/language_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Language v1 API +====================================== + +.. automodule:: google.cloud.language_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/language/__init__.py b/owl-bot-staging/v1/google/cloud/language/__init__.py new file mode 100644 index 00000000..f65e8909 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.language import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.language_v1.services.language_service.client import LanguageServiceClient +from google.cloud.language_v1.services.language_service.async_client import LanguageServiceAsyncClient + +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesRequest +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesResponse +from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentRequest +from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentResponse +from google.cloud.language_v1.types.language_service import AnalyzeSentimentRequest +from google.cloud.language_v1.types.language_service import AnalyzeSentimentResponse +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxRequest +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxResponse +from google.cloud.language_v1.types.language_service import AnnotateTextRequest +from google.cloud.language_v1.types.language_service import AnnotateTextResponse +from google.cloud.language_v1.types.language_service import ClassificationCategory +from google.cloud.language_v1.types.language_service import ClassificationModelOptions +from google.cloud.language_v1.types.language_service import ClassifyTextRequest +from google.cloud.language_v1.types.language_service import ClassifyTextResponse +from google.cloud.language_v1.types.language_service import DependencyEdge +from google.cloud.language_v1.types.language_service import Document +from google.cloud.language_v1.types.language_service import Entity +from google.cloud.language_v1.types.language_service import EntityMention +from google.cloud.language_v1.types.language_service import ModerateTextRequest +from google.cloud.language_v1.types.language_service import ModerateTextResponse +from google.cloud.language_v1.types.language_service import PartOfSpeech +from google.cloud.language_v1.types.language_service import Sentence +from google.cloud.language_v1.types.language_service import Sentiment +from google.cloud.language_v1.types.language_service import TextSpan 
+from google.cloud.language_v1.types.language_service import Token +from google.cloud.language_v1.types.language_service import EncodingType + +__all__ = ('LanguageServiceClient', + 'LanguageServiceAsyncClient', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1/google/cloud/language/gapic_version.py b/owl-bot-staging/v1/google/cloud/language/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language/py.typed b/owl-bot-staging/v1/google/cloud/language/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py new file mode 100644 index 00000000..98a10950 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
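
The versionless ``google.cloud.language`` package above only re-exports the ``language_v1`` clients and types, so both import paths resolve to the same objects. A minimal sketch of that equivalence:

.. code-block:: python

    from google.cloud import language
    from google.cloud import language_v1

    # The top-level package re-exports the v1 surface, so these names
    # refer to the same class objects.
    assert language.LanguageServiceClient is language_v1.LanguageServiceClient
    assert language.Document is language_v1.Document
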
+# +from google.cloud.language_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.language_service import LanguageServiceClient +from .services.language_service import LanguageServiceAsyncClient + +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassificationModelOptions +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import ModerateTextRequest +from .types.language_service import ModerateTextResponse +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token +from .types.language_service import EncodingType + +__all__ = ( + 'LanguageServiceAsyncClient', +'AnalyzeEntitiesRequest', +'AnalyzeEntitiesResponse', +'AnalyzeEntitySentimentRequest', +'AnalyzeEntitySentimentResponse', +'AnalyzeSentimentRequest', +'AnalyzeSentimentResponse', +'AnalyzeSyntaxRequest', +'AnalyzeSyntaxResponse', +'AnnotateTextRequest', +'AnnotateTextResponse', +'ClassificationCategory', +'ClassificationModelOptions', +'ClassifyTextRequest', +'ClassifyTextResponse', +'DependencyEdge', +'Document', +'EncodingType', +'Entity', +'EntityMention', +'LanguageServiceClient', +'ModerateTextRequest', +'ModerateTextResponse', +'PartOfSpeech', +'Sentence', +'Sentiment', +'TextSpan', +'Token', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json new file mode 100644 index 00000000..fa2c065a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.language_v1", + "protoPackage": "google.cloud.language.v1", + "schema": "1.0", + "services": { + "LanguageService": { + "clients": { + "grpc": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LanguageServiceAsyncClient", + 
"rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/py.typed b/owl-bot-staging/v1/google/cloud/language_v1/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py new file mode 100644 index 00000000..6e5f9052 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient + +__all__ = ( + 'LanguageServiceClient', + 'LanguageServiceAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py new file mode 100644 index 00000000..cbd88917 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py @@ -0,0 +1,962 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.language_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.language_v1.types import language_service +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. 
+ """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) + common_project_path = staticmethod(LanguageServiceClient.common_project_path) + parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) + common_location_path = staticmethod(LanguageServiceClient.common_location_path) + parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
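
Per the constructor documentation above, the endpoint is resolved from ``client_options`` together with the mTLS environment variables. A sketch of pinning the endpoint explicitly; the value shown is the default global endpoint and is illustrative:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import language_v1

    # An explicit api_endpoint takes precedence over env-var resolution.
    options = ClientOptions(api_endpoint="language.googleapis.com")
    client = language_v1.LanguageServiceAsyncClient(client_options=options)
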
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]]): + The request object. The sentiment analysis request + message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate sentence offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
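
The generated sample above passes a full ``AnalyzeSentimentRequest``; alternatively, the flattened ``document`` and ``encoding_type`` parameters may be passed directly, as this sketch shows:

.. code-block:: python

    from google.cloud import language_v1

    async def sample_analyze_sentiment_flattened(
        client: language_v1.LanguageServiceAsyncClient,
    ):
        document = language_v1.Document(
            content="content_value",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        # Flattened form; the client assembles the request object.
        return await client.analyze_sentiment(
            document=document,
            encoding_type=language_v1.EncodingType.UTF8,
        )
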
+ return response + + async def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]]): + The request object. The entity analysis request message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entities, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]]): + The request object. The entity-level sentiment analysis + request message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
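
As the wrapper above shows, the analysis methods attach a default retry (0.1 s initial backoff, 60 s maximum, multiplier 1.3, 600 s deadline) covering ``DeadlineExceeded`` and ``ServiceUnavailable``. A sketch of overriding it per call; the tighter 120 s deadline is illustrative:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Mirrors the generated defaults, but with a shorter overall deadline.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=120.0,
    )

    async def analyze_with_retry(client, request):
        return await client.analyze_entities(
            request=request, retry=custom_retry, timeout=120.0,
        )
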
+ has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entity_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]]): + The request object. The syntax analysis request message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
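
The ``AnalyzeEntitySentimentResponse`` above carries per-entity sentiment on each returned entity. A sketch of walking the response:

.. code-block:: python

    async def print_entity_sentiment(client, document):
        response = await client.analyze_entity_sentiment(
            request={"document": document},
        )
        # Each entity carries its own sentiment score and magnitude.
        for entity in response.entities:
            print(entity.name, entity.sentiment.score, entity.sentiment.magnitude)
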
+ + Returns: + google.cloud.language_v1.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_syntax, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]]): + The request object. The document classification request + message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. 
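
``analyze_syntax`` above returns sentences and tokens; a sketch of inspecting the part-of-speech tags on those tokens:

.. code-block:: python

    async def print_tokens(client, document):
        response = await client.analyze_syntax(request={"document": document})
        for token in response.tokens:
            # text.content is the token surface form; part_of_speech.tag
            # is the coarse part-of-speech category.
            print(token.text.content, token.part_of_speech.tag)
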
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.classify_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.ModerateTextRequest, dict]]): + The request object. The document moderation request + message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ModerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.moderate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`google.cloud.language_v1.types.AnnotateTextRequest.Features`): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py new file mode 100644 index 00000000..0a25db80 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py @@ -0,0 +1,1115 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+
+from google.cloud.language_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.language_v1.types import language_service
+from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LanguageServiceGrpcTransport
+from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
+from .transports.rest import LanguageServiceRestTransport
+
+
+class LanguageServiceClientMeta(type):
+    """Metaclass for the LanguageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
+    _transport_registry["grpc"] = LanguageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = LanguageServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[LanguageServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "language.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LanguageServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]): + The request object. The sentiment analysis request + message. 
+ document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]): + The request object. The entity analysis request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]): + The request object. The entity-level sentiment analysis + request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]): + The request object. The syntax analysis request message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]): + The request object. The document classification request + message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.ModerateTextRequest, dict]): + The request object. The document moderation request + message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ModerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ModerateTextRequest): + request = language_service.ModerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.moderate_text] + + # Send the request. 
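+        # The per-call ``retry`` and ``timeout`` arguments, when supplied,
+        # override the defaults that ``_prep_wrapped_messages`` baked into
+        # the wrapped method above.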
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (google.cloud.language_v1.types.AnnotateTextRequest.Features): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LanguageServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceClient", +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py new file mode 100644 index 00000000..3cb6ab92 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestTransport +from .rest import LanguageServiceRestInterceptor + + +# Compile a registry of transports. 
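+# The keys registered below ("grpc", "grpc_asyncio", "rest") are the same
+# strings a caller may pass as the ``transport`` argument when constructing
+# LanguageServiceClient; the client resolves that string through a registry
+# like this one to select the transport class.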
+_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] +_transport_registry['grpc'] = LanguageServiceGrpcTransport +_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport +_transport_registry['rest'] = LanguageServiceRestTransport + +__all__ = ( + 'LanguageServiceTransport', + 'LanguageServiceGrpcTransport', + 'LanguageServiceGrpcAsyncIOTransport', + 'LanguageServiceRestTransport', + 'LanguageServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py new file mode 100644 index 00000000..5b45658f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.language_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1.types import language_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LanguageServiceTransport(abc.ABC): + """Abstract transport class for LanguageService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'language.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply an audience to credentials the user passed in
+            # (e.g. from a credentials file); only default credentials get one.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
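+        # A note on the defaults below: each retried method backs off
+        # exponentially, starting near 0.1s and multiplying the delay by 1.3
+        # per attempt (capped at 60s); only DeadlineExceeded and
+        # ServiceUnavailable errors are retried, and retrying stops at the
+        # overall 600s deadline. moderate_text, added in this change, is
+        # wrapped with no default retry and no default timeout.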
+ self._wrapped_methods = { + self.analyze_sentiment: gapic_v1.method.wrap_method( + self.analyze_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_entities: gapic_v1.method.wrap_method( + self.analyze_entities, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_entity_sentiment: gapic_v1.method.wrap_method( + self.analyze_entity_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_syntax: gapic_v1.method.wrap_method( + self.analyze_syntax, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.classify_text: gapic_v1.method.wrap_method( + self.classify_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.moderate_text: gapic_v1.method.wrap_method( + self.moderate_text, + default_timeout=None, + client_info=client_info, + ), + self.annotate_text: gapic_v1.method.wrap_method( + self.annotate_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
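+
+        In typical use you do not call this method directly; the client's
+        context manager closes the transport on exit. A minimal sketch of
+        that pattern (assuming default credentials are available in the
+        environment):
+
+        .. code-block:: python
+
+            from google.cloud import language_v1
+
+            with language_v1.LanguageServiceClient() as client:
+                document = language_v1.Document(
+                    content="content_value",
+                    type_=language_v1.Document.Type.PLAIN_TEXT,
+                )
+                print(client.classify_text(document=document))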
+ """ + raise NotImplementedError() + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Union[ + language_service.AnalyzeSentimentResponse, + Awaitable[language_service.AnalyzeSentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Union[ + language_service.AnalyzeEntitiesResponse, + Awaitable[language_service.AnalyzeEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Union[ + language_service.AnalyzeEntitySentimentResponse, + Awaitable[language_service.AnalyzeEntitySentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Union[ + language_service.AnalyzeSyntaxResponse, + Awaitable[language_service.AnalyzeSyntaxResponse] + ]]: + raise NotImplementedError() + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Union[ + language_service.ClassifyTextResponse, + Awaitable[language_service.ClassifyTextResponse] + ]]: + raise NotImplementedError() + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Union[ + language_service.ModerateTextResponse, + Awaitable[language_service.ModerateTextResponse] + ]]: + raise NotImplementedError() + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Union[ + language_service.AnnotateTextResponse, + Awaitable[language_service.AnnotateTextResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'LanguageServiceTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py new file mode 100644 index 00000000..5cb5acd2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'language.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[grpc.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
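+
+        Example (an illustrative sketch, not generated reference text; it
+        assumes Application Default Credentials are configured):
+
+        .. code-block:: python
+
+            from google.cloud import language_v1
+
+            transport = LanguageServiceGrpcTransport()
+            client = language_v1.LanguageServiceClient(transport=transport)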
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + ~.AnalyzeSentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + ~.ModerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
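+        # Illustrative usage (hypothetical caller code): the returned callable
+        # takes a request message and performs the RPC synchronously, e.g.
+        #
+        #     response = transport.moderate_text(
+        #         language_service.ModerateTextRequest(document=document),
+        #     )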
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'LanguageServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..8839befb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.language_v1.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import LanguageServiceGrpcTransport + + +class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): + """gRPC AsyncIO backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'language.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with ``credentials``.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'language.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def analyze_sentiment(self) -> Callable[
+            [language_service.AnalyzeSentimentRequest],
+            Awaitable[language_service.AnalyzeSentimentResponse]]:
+        r"""Return a callable for the analyze sentiment method over gRPC.
+
+        Analyzes the sentiment of the provided text.
+
+        Returns:
+            Callable[[~.AnalyzeSentimentRequest],
+                    Awaitable[~.AnalyzeSentimentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'analyze_sentiment' not in self._stubs:
+            self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary(
+                '/google.cloud.language.v1.LanguageService/AnalyzeSentiment',
+                request_serializer=language_service.AnalyzeSentimentRequest.serialize,
+                response_deserializer=language_service.AnalyzeSentimentResponse.deserialize,
+            )
+        return self._stubs['analyze_sentiment']
+
+    @property
+    def analyze_entities(self) -> Callable[
+            [language_service.AnalyzeEntitiesRequest],
+            Awaitable[language_service.AnalyzeEntitiesResponse]]:
+        r"""Return a callable for the analyze entities method over gRPC.
+
+        Finds named entities (currently proper names and
+        common nouns) in the text along with entity types,
+        salience, mentions for each entity, and other
+        properties.
+
+        Returns:
+            Callable[[~.AnalyzeEntitiesRequest],
+                    Awaitable[~.AnalyzeEntitiesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'analyze_entities' not in self._stubs:
+            self._stubs['analyze_entities'] = self.grpc_channel.unary_unary(
+                '/google.cloud.language.v1.LanguageService/AnalyzeEntities',
+                request_serializer=language_service.AnalyzeEntitiesRequest.serialize,
+                response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize,
+            )
+        return self._stubs['analyze_entities']
+
+    @property
+    def analyze_entity_sentiment(self) -> Callable[
+            [language_service.AnalyzeEntitySentimentRequest],
+            Awaitable[language_service.AnalyzeEntitySentimentResponse]]:
+        r"""Return a callable for the analyze entity sentiment method over gRPC.
+
+        Finds entities, similar to
+        [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities]
+        in the text and analyzes sentiment associated with each entity
+        and its mentions.
+
+        Returns:
+            Callable[[~.AnalyzeEntitySentimentRequest],
+                    Awaitable[~.AnalyzeEntitySentimentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse]]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse]]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Awaitable[language_service.ModerateTextResponse]]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + Awaitable[~.ModerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
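+        # Illustrative usage (hypothetical caller code): unlike the sync
+        # transport, the returned callable produces an awaitable, e.g.
+        #
+        #     response = await transport.moderate_text(
+        #         language_service.ModerateTextRequest(document=document),
+        #     )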
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse]]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'LanguageServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py new file mode 100644 index 00000000..eb368cc9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py @@ -0,0 +1,1029 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.language_v1.types import language_service + +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LanguageServiceRestInterceptor: + """Interceptor for LanguageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LanguageServiceRestTransport. + + .. 
code-block:: python
+        class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor):
+            def pre_analyze_entities(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_entities(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_entity_sentiment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_entity_sentiment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_sentiment(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_sentiment(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_analyze_syntax(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_analyze_syntax(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_annotate_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_annotate_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_classify_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_classify_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_moderate_text(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_moderate_text(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor())
+        client = LanguageServiceClient(transport=transport)
+
+    """
+    def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for analyze_entities
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the LanguageService server.
+        """
+        return request, metadata
+
+    def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse:
+        """Post-rpc interceptor for analyze_entities
+
+        Override in a subclass to manipulate the response
+        after it is returned by the LanguageService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for analyze_entity_sentiment
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the LanguageService server.
+        """
+        return request, metadata
+
+    def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse:
+        """Post-rpc interceptor for analyze_entity_sentiment
+
+        Override in a subclass to manipulate the response
+        after it is returned by the LanguageService server but before
+        it is returned to user code.
+ """ + return response + def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for moderate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. 
+ """ + return request, metadata + + def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: + """Post-rpc interceptor for moderate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LanguageServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AnalyzeEntities(LanguageServiceRestStub):
+        def __hash__(self):
+            return hash("AnalyzeEntities")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: language_service.AnalyzeEntitiesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> language_service.AnalyzeEntitiesResponse:
+            r"""Call the analyze entities method over HTTP.
+
+            Args:
+                request (~.language_service.AnalyzeEntitiesRequest):
+                    The request object. The entity analysis request message.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.language_service.AnalyzeEntitiesResponse:
+                    The entity analysis response message.
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/documents:analyzeEntities',
+                'body': '*',
+            },
+            ]
+            request, metadata = self._interceptor.pre_analyze_entities(request, metadata)
+            pb_request = language_service.AnalyzeEntitiesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
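+            # For example, an HTTP 404 from the service surfaces as
+            # google.api_core.exceptions.NotFound and an HTTP 400 as
+            # google.api_core.exceptions.BadRequest.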
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeEntitySentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeEntitySentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeSentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSyntaxRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:analyzeSyntax', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnnotateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:annotateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
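+            # Note: because http_options declares 'body': '*', transcoding
+            # serializes every request field into the JSON request body; the
+            # query string then carries only system parameters such as the
+            # "$alt" value set above.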
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ClassifyTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:classifyText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
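+            # Illustrative sketch (assumes the language_service types defined
+            # later in this PR, plus a `document` built by the caller): a
+            # ClassifyText request can opt into the V2 classification model:
+            #
+            #     request = language_service.ClassifyTextRequest(
+            #         document=document,
+            #         classification_model_options=language_service.ClassificationModelOptions(
+            #             v2_model=language_service.ClassificationModelOptions.V2Model(),
+            #         ),
+            #     )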
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + class _ModerateText(LanguageServiceRestStub): + def __hash__(self): + return hash("ModerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ModerateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ModerateTextResponse: + r"""Call the moderate text method over HTTP. + + Args: + request (~.language_service.ModerateTextRequest): + The request object. The document moderation request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ModerateTextResponse: + The document moderation response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/documents:moderateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_moderate_text(request, metadata) + pb_request = language_service.ModerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ModerateTextResponse() + pb_resp = language_service.ModerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_moderate_text(resp) + return resp + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LanguageServiceRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py new file mode 100644 index 00000000..8dadfa8a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
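+# Illustrative usage sketch (not generated code; assumes an installed package
+# and Application Default Credentials): the types re-exported below pair with
+# the client to reach the new ModerateText RPC over the REST transport, e.g.:
+#
+#     from google.cloud import language_v1
+#
+#     client = language_v1.LanguageServiceClient(transport="rest")
+#     document = language_v1.Document(
+#         content="Text to check.",
+#         type_=language_v1.Document.Type.PLAIN_TEXT,
+#     )
+#     response = client.moderate_text(request={"document": document})
+#     for category in response.moderation_categories:
+#         print(category.name, category.confidence)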
+# +from .language_service import ( + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + AnnotateTextRequest, + AnnotateTextResponse, + ClassificationCategory, + ClassificationModelOptions, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + ModerateTextRequest, + ModerateTextResponse, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, + EncodingType, +) + +__all__ = ( + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py new file mode 100644 index 00000000..56d1d3ed --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py @@ -0,0 +1,1724 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.language.v1', + manifest={ + 'EncodingType', + 'Document', + 'Sentence', + 'Entity', + 'Token', + 'Sentiment', + 'PartOfSpeech', + 'DependencyEdge', + 'EntityMention', + 'TextSpan', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + }, +) + + +class EncodingType(proto.Enum): + r"""Represents the text encoding that the caller uses to process the + output. Providing an ``EncodingType`` is recommended because the API + provides the beginning offsets for various outputs, such as tokens + and mentions, and languages that natively use different text + encodings may access offsets differently. + + Values: + NONE (0): + If ``EncodingType`` is not specified, encoding-dependent + information (such as ``begin_offset``) will be set at + ``-1``. 
+        UTF8 (1):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-8 encoding of the input. C++ and
+            Go are examples of languages that use this encoding
+            natively.
+        UTF16 (2):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-16 encoding of the input. Java
+            and JavaScript are examples of languages that use this
+            encoding natively.
+        UTF32 (3):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-32 encoding of the input. Python
+            is an example of a language that uses this encoding
+            natively.
+    """
+    NONE = 0
+    UTF8 = 1
+    UTF16 = 2
+    UTF32 = 3
+
+
+class Document(proto.Message):
+    r"""Represents the input to API methods.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        type_ (google.cloud.language_v1.types.Document.Type):
+            Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+            returns an ``INVALID_ARGUMENT`` error.
+        content (str):
+            The content of the input in string format.
+            Cloud audit logging exempt since it is based on
+            user data.
+
+            This field is a member of `oneof`_ ``source``.
+        gcs_content_uri (str):
+            The Google Cloud Storage URI where the file content is
+            located. This URI must be of the form:
+            gs://bucket_name/object_name. For more details, see
+            https://cloud.google.com/storage/docs/reference-uris. NOTE:
+            Cloud Storage object versioning is not supported.
+
+            This field is a member of `oneof`_ ``source``.
+        language (str):
+            The language of the document (if not specified, the language
+            is automatically detected). Both ISO and BCP-47 language
+            codes are accepted. `Language
+            Support <https://cloud.google.com/natural-language/docs/languages>`__
+            lists currently supported languages for each API method. If
+            the language (either specified by the caller or
+            automatically detected) is not supported by the called API
+            method, an ``INVALID_ARGUMENT`` error is returned.
+    """
+    class Type(proto.Enum):
+        r"""The document types enum.
+
+        Values:
+            TYPE_UNSPECIFIED (0):
+                The content type is not specified.
+            PLAIN_TEXT (1):
+                Plain text
+            HTML (2):
+                HTML
+        """
+        TYPE_UNSPECIFIED = 0
+        PLAIN_TEXT = 1
+        HTML = 2
+
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Type,
+    )
+    content: str = proto.Field(
+        proto.STRING,
+        number=2,
+        oneof='source',
+    )
+    gcs_content_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='source',
+    )
+    language: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class Sentence(proto.Message):
+    r"""Represents a sentence in the input document.
+
+    Attributes:
+        text (google.cloud.language_v1.types.TextSpan):
+            The sentence text.
+        sentiment (google.cloud.language_v1.types.Sentiment):
+            For calls to [AnalyzeSentiment][] or if
+            [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]
+            is set to true, this field will contain the sentiment for
+            the sentence.
+ """ + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=2, + message='Sentiment', + ) + + +class Entity(proto.Message): + r"""Represents a phrase in the text that is a known entity, such + as a person, an organization, or location. The API associates + information, such as salience and mentions, with entities. + + Attributes: + name (str): + The representative name for the entity. + type_ (google.cloud.language_v1.types.Entity.Type): + The entity type. + metadata (MutableMapping[str, str]): + Metadata associated with the entity. + + For most entity types, the metadata is a Wikipedia URL + (``wikipedia_url``) and Knowledge Graph MID (``mid``), if + they are available. For the metadata associated with other + entity types, see the Type table below. + salience (float): + The salience score associated with the entity in the [0, + 1.0] range. + + The salience score for an entity provides information about + the importance or centrality of that entity to the entire + document text. Scores closer to 0 are less salient, while + scores closer to 1.0 are highly salient. + mentions (MutableSequence[google.cloud.language_v1.types.EntityMention]): + The mentions of this entity in the input + document. The API currently supports proper noun + mentions. + sentiment (google.cloud.language_v1.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided + document. + """ + class Type(proto.Enum): + r"""The type of the entity. For most entity types, the associated + metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph + MID (``mid``). The table below lists the associated fields for + entities that have different metadata. 
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            PERSON (1):
+                Person
+            LOCATION (2):
+                Location
+            ORGANIZATION (3):
+                Organization
+            EVENT (4):
+                Event
+            WORK_OF_ART (5):
+                Artwork
+            CONSUMER_GOOD (6):
+                Consumer product
+            OTHER (7):
+                Other types of entities
+            PHONE_NUMBER (9):
+                Phone number
+
+                The metadata lists the phone number, formatted according to
+                local convention, plus whichever additional elements appear
+                in the text:
+
+                - ``number`` - the actual number, broken down into sections
+                  as per local convention
+                - ``national_prefix`` - country code, if detected
+                - ``area_code`` - region or area code, if detected
+                - ``extension`` - phone extension (to be dialed after
+                  connection), if detected
+            ADDRESS (10):
+                Address
+
+                The metadata identifies the street number and locality plus
+                whichever additional elements appear in the text:
+
+                - ``street_number`` - street number
+                - ``locality`` - city or town
+                - ``street_name`` - street/route name, if detected
+                - ``postal_code`` - postal code, if detected
+                - ``country`` - country, if detected
+                - ``broad_region`` - administrative area, such as the
+                  state, if detected
+                - ``narrow_region`` - smaller administrative area, such as
+                  county, if detected
+                - ``sublocality`` - used in Asian addresses to demark a
+                  district within a city, if detected
+            DATE (11):
+                Date
+
+                The metadata identifies the components of the date:
+
+                - ``year`` - four digit year, if detected
+                - ``month`` - two digit month number, if detected
+                - ``day`` - two digit day number, if detected
+            NUMBER (12):
+                Number
+
+                The metadata is the number itself.
+            PRICE (13):
+                Price
+
+                The metadata identifies the ``value`` and ``currency``.
+        """
+        UNKNOWN = 0
+        PERSON = 1
+        LOCATION = 2
+        ORGANIZATION = 3
+        EVENT = 4
+        WORK_OF_ART = 5
+        CONSUMER_GOOD = 6
+        OTHER = 7
+        PHONE_NUMBER = 9
+        ADDRESS = 10
+        DATE = 11
+        NUMBER = 12
+        PRICE = 13
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Type,
+    )
+    metadata: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    salience: float = proto.Field(
+        proto.FLOAT,
+        number=4,
+    )
+    mentions: MutableSequence['EntityMention'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message='EntityMention',
+    )
+    sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='Sentiment',
+    )
+
+
+class Token(proto.Message):
+    r"""Represents the smallest syntactic building block of the text.
+
+    Attributes:
+        text (google.cloud.language_v1.types.TextSpan):
+            The token text.
+        part_of_speech (google.cloud.language_v1.types.PartOfSpeech):
+            The part of speech tag for this token.
+        dependency_edge (google.cloud.language_v1.types.DependencyEdge):
+            Dependency tree parse for this token.
+        lemma (str):
+            `Lemma <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__
+            of the token.
+    """
+
+    text: 'TextSpan' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='TextSpan',
+    )
+    part_of_speech: 'PartOfSpeech' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='PartOfSpeech',
+    )
+    dependency_edge: 'DependencyEdge' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='DependencyEdge',
+    )
+    lemma: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class Sentiment(proto.Message):
+    r"""Represents the feeling associated with the entire text or
+    entities in the text.
+
+    Attributes:
+        magnitude (float):
+            A non-negative number in the [0, +inf) range, which
+            represents the absolute magnitude of sentiment regardless of
+            score (positive or negative).
+ score (float): + Sentiment score between -1.0 (negative + sentiment) and 1.0 (positive sentiment). + """ + + magnitude: float = proto.Field( + proto.FLOAT, + number=2, + ) + score: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class PartOfSpeech(proto.Message): + r"""Represents part of speech information for a token. Parts of speech + are as defined in + http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf + + Attributes: + tag (google.cloud.language_v1.types.PartOfSpeech.Tag): + The part of speech tag. + aspect (google.cloud.language_v1.types.PartOfSpeech.Aspect): + The grammatical aspect. + case (google.cloud.language_v1.types.PartOfSpeech.Case): + The grammatical case. + form (google.cloud.language_v1.types.PartOfSpeech.Form): + The grammatical form. + gender (google.cloud.language_v1.types.PartOfSpeech.Gender): + The grammatical gender. + mood (google.cloud.language_v1.types.PartOfSpeech.Mood): + The grammatical mood. + number (google.cloud.language_v1.types.PartOfSpeech.Number): + The grammatical number. + person (google.cloud.language_v1.types.PartOfSpeech.Person): + The grammatical person. + proper (google.cloud.language_v1.types.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (google.cloud.language_v1.types.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (google.cloud.language_v1.types.PartOfSpeech.Tense): + The grammatical tense. + voice (google.cloud.language_v1.types.PartOfSpeech.Voice): + The grammatical voice. + """ + class Tag(proto.Enum): + r"""The part of speech tags enum. + + Values: + UNKNOWN (0): + Unknown + ADJ (1): + Adjective + ADP (2): + Adposition (preposition and postposition) + ADV (3): + Adverb + CONJ (4): + Conjunction + DET (5): + Determiner + NOUN (6): + Noun (common and proper) + NUM (7): + Cardinal number + PRON (8): + Pronoun + PRT (9): + Particle or other function word + PUNCT (10): + Punctuation + VERB (11): + Verb (all tenses and modes) + X (12): + Other: foreign words, typos, abbreviations + AFFIX (13): + Affix + """ + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(proto.Enum): + r"""The characteristic of a verb that expresses time flow during + an event. + + Values: + ASPECT_UNKNOWN (0): + Aspect is not applicable in the analyzed + language or is not predicted. + PERFECTIVE (1): + Perfective + IMPERFECTIVE (2): + Imperfective + PROGRESSIVE (3): + Progressive + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(proto.Enum): + r"""The grammatical function performed by a noun or pronoun in a + phrase, clause, or sentence. In some languages, other parts of + speech, such as adjective and determiner, take case inflection + in agreement with the noun. + + Values: + CASE_UNKNOWN (0): + Case is not applicable in the analyzed + language or is not predicted. 
+            ACCUSATIVE (1):
+                Accusative
+            ADVERBIAL (2):
+                Adverbial
+            COMPLEMENTIVE (3):
+                Complementive
+            DATIVE (4):
+                Dative
+            GENITIVE (5):
+                Genitive
+            INSTRUMENTAL (6):
+                Instrumental
+            LOCATIVE (7):
+                Locative
+            NOMINATIVE (8):
+                Nominative
+            OBLIQUE (9):
+                Oblique
+            PARTITIVE (10):
+                Partitive
+            PREPOSITIONAL (11):
+                Prepositional
+            REFLEXIVE_CASE (12):
+                Reflexive
+            RELATIVE_CASE (13):
+                Relative
+            VOCATIVE (14):
+                Vocative
+        """
+        CASE_UNKNOWN = 0
+        ACCUSATIVE = 1
+        ADVERBIAL = 2
+        COMPLEMENTIVE = 3
+        DATIVE = 4
+        GENITIVE = 5
+        INSTRUMENTAL = 6
+        LOCATIVE = 7
+        NOMINATIVE = 8
+        OBLIQUE = 9
+        PARTITIVE = 10
+        PREPOSITIONAL = 11
+        REFLEXIVE_CASE = 12
+        RELATIVE_CASE = 13
+        VOCATIVE = 14
+
+    class Form(proto.Enum):
+        r"""Depending on the language, Form can categorize different forms
+        of verbs, adjectives, adverbs, etc.; for example, it can
+        categorize inflected endings of verbs and adjectives or
+        distinguish between short and long forms of adjectives and
+        participles.
+
+        Values:
+            FORM_UNKNOWN (0):
+                Form is not applicable in the analyzed
+                language or is not predicted.
+            ADNOMIAL (1):
+                Adnomial
+            AUXILIARY (2):
+                Auxiliary
+            COMPLEMENTIZER (3):
+                Complementizer
+            FINAL_ENDING (4):
+                Final ending
+            GERUND (5):
+                Gerund
+            REALIS (6):
+                Realis
+            IRREALIS (7):
+                Irrealis
+            SHORT (8):
+                Short form
+            LONG (9):
+                Long form
+            ORDER (10):
+                Order form
+            SPECIFIC (11):
+                Specific form
+        """
+        FORM_UNKNOWN = 0
+        ADNOMIAL = 1
+        AUXILIARY = 2
+        COMPLEMENTIZER = 3
+        FINAL_ENDING = 4
+        GERUND = 5
+        REALIS = 6
+        IRREALIS = 7
+        SHORT = 8
+        LONG = 9
+        ORDER = 10
+        SPECIFIC = 11
+
+    class Gender(proto.Enum):
+        r"""Gender classes of nouns reflected in the behaviour of
+        associated words.
+
+        Values:
+            GENDER_UNKNOWN (0):
+                Gender is not applicable in the analyzed
+                language or is not predicted.
+            FEMININE (1):
+                Feminine
+            MASCULINE (2):
+                Masculine
+            NEUTER (3):
+                Neuter
+        """
+        GENDER_UNKNOWN = 0
+        FEMININE = 1
+        MASCULINE = 2
+        NEUTER = 3
+
+    class Mood(proto.Enum):
+        r"""The grammatical feature of verbs, used for showing modality
+        and attitude.
+
+        Values:
+            MOOD_UNKNOWN (0):
+                Mood is not applicable in the analyzed
+                language or is not predicted.
+            CONDITIONAL_MOOD (1):
+                Conditional
+            IMPERATIVE (2):
+                Imperative
+            INDICATIVE (3):
+                Indicative
+            INTERROGATIVE (4):
+                Interrogative
+            JUSSIVE (5):
+                Jussive
+            SUBJUNCTIVE (6):
+                Subjunctive
+        """
+        MOOD_UNKNOWN = 0
+        CONDITIONAL_MOOD = 1
+        IMPERATIVE = 2
+        INDICATIVE = 3
+        INTERROGATIVE = 4
+        JUSSIVE = 5
+        SUBJUNCTIVE = 6
+
+    class Number(proto.Enum):
+        r"""Count distinctions.
+
+        Values:
+            NUMBER_UNKNOWN (0):
+                Number is not applicable in the analyzed
+                language or is not predicted.
+            SINGULAR (1):
+                Singular
+            PLURAL (2):
+                Plural
+            DUAL (3):
+                Dual
+        """
+        NUMBER_UNKNOWN = 0
+        SINGULAR = 1
+        PLURAL = 2
+        DUAL = 3
+
+    class Person(proto.Enum):
+        r"""The distinction between the speaker, second person, third
+        person, etc.
+
+        Values:
+            PERSON_UNKNOWN (0):
+                Person is not applicable in the analyzed
+                language or is not predicted.
+            FIRST (1):
+                First
+            SECOND (2):
+                Second
+            THIRD (3):
+                Third
+            REFLEXIVE_PERSON (4):
+                Reflexive
+        """
+        PERSON_UNKNOWN = 0
+        FIRST = 1
+        SECOND = 2
+        THIRD = 3
+        REFLEXIVE_PERSON = 4
+
+    class Proper(proto.Enum):
+        r"""This category shows if the token is part of a proper name.
+
+        Values:
+            PROPER_UNKNOWN (0):
+                Proper is not applicable in the analyzed
+                language or is not predicted.
+            PROPER (1):
+                Proper
+            NOT_PROPER (2):
+                Not proper
+        """
+        PROPER_UNKNOWN = 0
+        PROPER = 1
+        NOT_PROPER = 2
+
+    class Reciprocity(proto.Enum):
+        r"""Reciprocal features of a pronoun.
+
+        Values:
+            RECIPROCITY_UNKNOWN (0):
+                Reciprocity is not applicable in the analyzed
+                language or is not predicted.
+            RECIPROCAL (1):
+                Reciprocal
+            NON_RECIPROCAL (2):
+                Non-reciprocal
+        """
+        RECIPROCITY_UNKNOWN = 0
+        RECIPROCAL = 1
+        NON_RECIPROCAL = 2
+
+    class Tense(proto.Enum):
+        r"""Time reference.
+
+        Values:
+            TENSE_UNKNOWN (0):
+                Tense is not applicable in the analyzed
+                language or is not predicted.
+            CONDITIONAL_TENSE (1):
+                Conditional
+            FUTURE (2):
+                Future
+            PAST (3):
+                Past
+            PRESENT (4):
+                Present
+            IMPERFECT (5):
+                Imperfect
+            PLUPERFECT (6):
+                Pluperfect
+        """
+        TENSE_UNKNOWN = 0
+        CONDITIONAL_TENSE = 1
+        FUTURE = 2
+        PAST = 3
+        PRESENT = 4
+        IMPERFECT = 5
+        PLUPERFECT = 6
+
+    class Voice(proto.Enum):
+        r"""The relationship between the action that a verb expresses and
+        the participants identified by its arguments.
+
+        Values:
+            VOICE_UNKNOWN (0):
+                Voice is not applicable in the analyzed
+                language or is not predicted.
+            ACTIVE (1):
+                Active
+            CAUSATIVE (2):
+                Causative
+            PASSIVE (3):
+                Passive
+        """
+        VOICE_UNKNOWN = 0
+        ACTIVE = 1
+        CAUSATIVE = 2
+        PASSIVE = 3
+
+    tag: Tag = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Tag,
+    )
+    aspect: Aspect = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Aspect,
+    )
+    case: Case = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=Case,
+    )
+    form: Form = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum=Form,
+    )
+    gender: Gender = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=Gender,
+    )
+    mood: Mood = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum=Mood,
+    )
+    number: Number = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=Number,
+    )
+    person: Person = proto.Field(
+        proto.ENUM,
+        number=8,
+        enum=Person,
+    )
+    proper: Proper = proto.Field(
+        proto.ENUM,
+        number=9,
+        enum=Proper,
+    )
+    reciprocity: Reciprocity = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=Reciprocity,
+    )
+    tense: Tense = proto.Field(
+        proto.ENUM,
+        number=11,
+        enum=Tense,
+    )
+    voice: Voice = proto.Field(
+        proto.ENUM,
+        number=12,
+        enum=Voice,
+    )
+
+
+class DependencyEdge(proto.Message):
+    r"""Represents dependency parse tree information for a token.
+    (For more information on dependency labels, see
+    http://www.aclweb.org/anthology/P13-2017).
+
+    Attributes:
+        head_token_index (int):
+            Represents the head of this token in the dependency tree.
+            This is the index of the token which has an arc going to
+            this token. The index is the position of the token in the
+            array of tokens returned by the API method. If this token is
+            a root token, then the ``head_token_index`` is its own
+            index.
+        label (google.cloud.language_v1.types.DependencyEdge.Label):
+            The parse label for the token.
+    """
+    class Label(proto.Enum):
+        r"""The parse label enum for the token.
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            ABBREV (1):
+                Abbreviation modifier
+            ACOMP (2):
+                Adjectival complement
+            ADVCL (3):
+                Adverbial clause modifier
+            ADVMOD (4):
+                Adverbial modifier
+            AMOD (5):
+                Adjectival modifier of an NP
+            APPOS (6):
+                Appositional modifier of an NP
+            ATTR (7):
+                Attribute dependent of a copular verb
+            AUX (8):
+                Auxiliary (non-main) verb
+            AUXPASS (9):
+                Passive auxiliary
+            CC (10):
+                Coordinating conjunction
+            CCOMP (11):
+                Clausal complement of a verb or adjective
+            CONJ (12):
+                Conjunct
+            CSUBJ (13):
+                Clausal subject
+            CSUBJPASS (14):
+                Clausal passive subject
+            DEP (15):
+                Dependency (unable to determine)
+            DET (16):
+                Determiner
+            DISCOURSE (17):
+                Discourse
+            DOBJ (18):
+                Direct object
+            EXPL (19):
+                Expletive
+            GOESWITH (20):
+                Goes with (part of a word in a text not well
+                edited)
+            IOBJ (21):
+                Indirect object
+            MARK (22):
+                Marker (word introducing a subordinate
+                clause)
+            MWE (23):
+                Multi-word expression
+            MWV (24):
+                Multi-word verbal expression
+            NEG (25):
+                Negation modifier
+            NN (26):
+                Noun compound modifier
+            NPADVMOD (27):
+                Noun phrase used as an adverbial modifier
+            NSUBJ (28):
+                Nominal subject
+            NSUBJPASS (29):
+                Passive nominal subject
+            NUM (30):
+                Numeric modifier of a noun
+            NUMBER (31):
+                Element of compound number
+            P (32):
+                Punctuation mark
+            PARATAXIS (33):
+                Parataxis relation
+            PARTMOD (34):
+                Participial modifier
+            PCOMP (35):
+                The complement of a preposition is a clause
+            POBJ (36):
+                Object of a preposition
+            POSS (37):
+                Possession modifier
+            POSTNEG (38):
+                Postverbal negative particle
+            PRECOMP (39):
+                Predicate complement
+            PRECONJ (40):
+                Preconjunct
+            PREDET (41):
+                Predeterminer
+            PREF (42):
+                Prefix
+            PREP (43):
+                Prepositional modifier
+            PRONL (44):
+                The relationship between a verb and verbal
+                morpheme
+            PRT (45):
+                Particle
+            PS (46):
+                Associative or possessive marker
+            QUANTMOD (47):
+                Quantifier phrase modifier
+            RCMOD (48):
+                Relative clause modifier
+            RCMODREL (49):
+                Complementizer in relative clause
+            RDROP (50):
+                Ellipsis without a preceding predicate
+            REF (51):
+                Referent
+            REMNANT (52):
+                Remnant
+            REPARANDUM (53):
+                Reparandum
+            ROOT (54):
+                Root
+            SNUM (55):
+                Suffix specifying a unit of number
+            SUFF (56):
+                Suffix
+            TMOD (57):
+                Temporal modifier
+            TOPIC (58):
+                Topic marker
+            VMOD (59):
+                Clause headed by a non-finite form of the verb
+                that modifies a noun
+            VOCATIVE (60):
+                Vocative
+            XCOMP (61):
+                Open clausal complement
+            SUFFIX (62):
+                Name suffix
+            TITLE (63):
+                Name title
+            ADVPHMOD (64):
+                Adverbial phrase modifier
+            AUXCAUS (65):
+                Causative auxiliary
+            AUXVV (66):
+                Helper auxiliary
+            DTMOD (67):
+                Rentaishi (Prenominal modifier)
+            FOREIGN (68):
+                Foreign words
+            KW (69):
+                Keyword
+            LIST (70):
+                List for chains of comparable items
+            NOMC (71):
+                Nominalized clause
+            NOMCSUBJ (72):
+                Nominalized clausal subject
+            NOMCSUBJPASS (73):
+                Nominalized clausal passive
+            NUMC (74):
+                Compound of numeric modifier
+            COP (75):
+                Copula
+            DISLOCATED (76):
+                Dislocated relation (for fronted/topicalized
+                elements)
+            ASP (77):
+                Aspect marker
+            GMOD (78):
+                Genitive modifier
+            GOBJ (79):
+                Genitive object
+            INFMOD (80):
+                Infinitival modifier
+            MES (81):
+                Measure
+            NCOMP (82):
+                Nominal complement of a noun
+        """
+        UNKNOWN = 0
+        ABBREV = 1
+        ACOMP = 2
+        ADVCL = 3
+        ADVMOD = 4
+        AMOD = 5
+        APPOS = 6
+        ATTR = 7
+        AUX = 8
+        AUXPASS = 9
+        CC = 10
+        CCOMP = 11
+        CONJ = 12
+        CSUBJ = 13
+        CSUBJPASS = 14
+        DEP = 15
+        DET = 16
+        DISCOURSE = 17
+        DOBJ = 18
+        EXPL = 19
+        GOESWITH = 20
+        IOBJ = 21
+        MARK = 22
+
MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index: int = proto.Field( + proto.INT32, + number=1, + ) + label: Label = proto.Field( + proto.ENUM, + number=2, + enum=Label, + ) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. + + Attributes: + text (google.cloud.language_v1.types.TextSpan): + The mention text. + type_ (google.cloud.language_v1.types.EntityMention.Type): + The type of the entity mention. + sentiment (google.cloud.language_v1.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. + """ + class Type(proto.Enum): + r"""The supported types of mentions. + + Values: + TYPE_UNKNOWN (0): + Unknown + PROPER (1): + Proper name + COMMON (2): + Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=3, + message='Sentiment', + ) + + +class TextSpan(proto.Message): + r"""Represents an output piece of text. + + Attributes: + content (str): + The content of the output text. + begin_offset (int): + The API calculates the beginning offset of the content in + the original document according to the + [EncodingType][google.cloud.language.v1.EncodingType] + specified in the API request. + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + begin_offset: int = proto.Field( + proto.INT32, + number=2, + ) + + +class ClassificationCategory(proto.Message): + r"""Represents a category returned from the text classifier. + + Attributes: + name (str): + The name of the category representing the + document. + confidence (float): + The classifier's confidence of the category. + Number represents how certain the classifier is + that this category represents the given text. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class ClassificationModelOptions(proto.Message): + r"""Model options available for classification requests. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + v1_model (google.cloud.language_v1.types.ClassificationModelOptions.V1Model): + Setting this field will use the V1 model and + V1 content categories version. The V1 model is a + legacy model; support for this will be + discontinued in the future. + + This field is a member of `oneof`_ ``model_type``. + v2_model (google.cloud.language_v1.types.ClassificationModelOptions.V2Model): + Setting this field will use the V2 model with + the appropriate content categories version. The + V2 model is a better performing model. + + This field is a member of `oneof`_ ``model_type``. + """ + + class V1Model(proto.Message): + r"""Options for the V1 model. + """ + + class V2Model(proto.Message): + r"""Options for the V2 model. + + Attributes: + content_categories_version (google.cloud.language_v1.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): + The content categories used for + classification. + """ + class ContentCategoriesVersion(proto.Enum): + r"""The content categories used for classification. + + Values: + CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): + If ``ContentCategoriesVersion`` is not specified, this + option will default to ``V1``. + V1 (1): + Legacy content categories of our initial + launch in 2017. + V2 (2): + Updated content categories in 2022. + """ + CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 + V1 = 1 + V2 = 2 + + content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( + proto.ENUM, + number=1, + enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', + ) + + v1_model: V1Model = proto.Field( + proto.MESSAGE, + number=1, + oneof='model_type', + message=V1Model, + ) + v2_model: V2Model = proto.Field( + proto.MESSAGE, + number=2, + oneof='model_type', + message=V2Model, + ) + + +class AnalyzeSentimentRequest(proto.Message): + r"""The sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSentimentResponse(proto.Message): + r"""The sentiment analysis response message. + + Attributes: + document_sentiment (google.cloud.language_v1.types.Sentiment): + The overall sentiment of the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + The sentiment for all the sentences in the + document. + """ + + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=1, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Sentence', + ) + + +class AnalyzeEntitySentimentRequest(proto.Message): + r"""The entity-level sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. 
Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + Sentences in the input document. + tokens (MutableSequence[google.cloud.language_v1.types.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. 
+ """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + language: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ClassifyTextRequest(proto.Message): + r"""The document classification request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): + Model options to use for classification. + Defaults to v1 options if not specified. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='ClassificationModelOptions', + ) + + +class ClassifyTextResponse(proto.Message): + r"""The document classification response message. + + Attributes: + categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Categories representing the input document. + """ + + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ClassificationCategory', + ) + + +class ModerateTextRequest(proto.Message): + r"""The document moderation request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + + +class ModerateTextResponse(proto.Message): + r"""The document moderation response message. + + Attributes: + moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Harmful and sensitive categories representing + the input document. + """ + + moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ClassificationCategory', + ) + + +class AnnotateTextRequest(proto.Message): + r"""The request message for the text annotation API, which can + perform multiple analysis types (sentiment, entities, and + syntax) in one call. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + features (google.cloud.language_v1.types.AnnotateTextRequest.Features): + Required. The enabled features. + encoding_type (google.cloud.language_v1.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + class Features(proto.Message): + r"""All available features for sentiment, syntax, and semantic + analysis. Setting each one to true will enable that specific + analysis for the input. + + Attributes: + extract_syntax (bool): + Extract syntax information. + extract_entities (bool): + Extract entities. + extract_document_sentiment (bool): + Extract document-level sentiment. + extract_entity_sentiment (bool): + Extract entities and their associated + sentiment. + classify_text (bool): + Classify the full document into categories. + moderate_text (bool): + Moderate the document for harmful and + sensitive categories. + classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): + The model options to use for classification. Defaults to v1 + options if not specified. Only used if ``classify_text`` is + set to true. 
+ """ + + extract_syntax: bool = proto.Field( + proto.BOOL, + number=1, + ) + extract_entities: bool = proto.Field( + proto.BOOL, + number=2, + ) + extract_document_sentiment: bool = proto.Field( + proto.BOOL, + number=3, + ) + extract_entity_sentiment: bool = proto.Field( + proto.BOOL, + number=4, + ) + classify_text: bool = proto.Field( + proto.BOOL, + number=6, + ) + moderate_text: bool = proto.Field( + proto.BOOL, + number=11, + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=10, + message='ClassificationModelOptions', + ) + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + features: Features = proto.Field( + proto.MESSAGE, + number=2, + message=Features, + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=3, + enum='EncodingType', + ) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + tokens (MutableSequence[google.cloud.language_v1.types.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + entities (MutableSequence[google.cloud.language_v1.types.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (google.cloud.language_v1.types.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Categories identified in the input document. + moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Harmful and sensitive categories identified + in the input document. 
+ """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Entity', + ) + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=4, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=5, + ) + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ClassificationCategory', + ) + moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ClassificationCategory', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 00000000..b104aa2e --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/language_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py new file mode 100644 index 00000000..71f2d049 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py new file mode 100644 index 00000000..14beb557 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_entities(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py new file mode 100644 index 00000000..a8a1b59b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py new file mode 100644 index 00000000..c6d27ac8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py new file mode 100644 index 00000000..6b65f274 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py new file mode 100644 index 00000000..c9a48df7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_sentiment(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py new file mode 100644 index 00000000..31640e52 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSyntax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py new file mode 100644 index 00000000..947613db --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnalyzeSyntax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_analyze_syntax(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py new file mode 100644 index 00000000..02a54aee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnnotateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py new file mode 100644 index 00000000..9d90a0f0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_AnnotateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_annotate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py new file mode 100644 index 00000000..a6497c09 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ClassifyText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py new file mode 100644 index 00000000..e1d32646 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ClassifyText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_classify_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py new file mode 100644 index 00000000..6f4d033d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ModerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +async def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ModerateText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py new file mode 100644 index 00000000..788b4a08 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1_generated_LanguageService_ModerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1 + + +def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1_generated_LanguageService_ModerateText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json new file mode 100644 index 00000000..4e481f59 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json @@ -0,0 +1,1190 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.language.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-language", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1_generated_language_service_analyze_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" 
+ }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1_generated_language_service_analyze_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1_generated_language_service_analyze_entity_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entity_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": 
"google.cloud.language_v1.LanguageServiceClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1_generated_language_service_analyze_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"language_v1_generated_language_service_analyze_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1_generated_language_service_analyze_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1_generated_language_service_analyze_syntax_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_syntax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1_generated_language_service_analyze_syntax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_analyze_syntax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1_generated_language_service_annotate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnnotateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_annotate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1_generated_language_service_annotate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_AnnotateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_annotate_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1_generated_language_service_classify_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ClassifyText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": 
"FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_classify_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1_generated_language_service_classify_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ClassifyText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_classify_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1_generated_language_service_moderate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ModerateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_moderate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1.LanguageServiceClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1_generated_language_service_moderate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1_generated_LanguageService_ModerateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1_generated_language_service_moderate_text_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py new file mode 100644 index 00000000..10fa218c --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', 'classification_model_options', ), + 'moderate_text': ('document', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library.
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool makes a best-effort attempt to convert positional + parameters in client method calls to keyword-based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 00000000..047e5bce --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
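The fixup script above is meant to be run once over application code when migrating to the request-object calling convention, e.g. python scripts/fixup_language_v1_keywords.py --input-directory ./my_app --output-directory ./my_app_fixed (directory names here are placeholders). Its effect on a call site, sketched with hypothetical names (client and my_retry stand in for whatever the caller already has):

# Before: positional API fields, as accepted by the flattened signature.
response = client.moderate_text(document, retry=my_retry)

# After: API fields folded into a request dict; retry/timeout/metadata are
# recognized as control parameters and left in place as keywords.
response = client.moderate_text(request={'document': document}, retry=my_retry)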
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-language' + + +description = "Google Cloud Language API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-language" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
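The constraints files above implement the library's lower-bound test strategy: constraints-3.7.txt pins every dependency to the floor declared in setup.py (google-api-core==1.34.0, proto-plus==1.22.0, protobuf==3.19.5) so CI proves those minimums still work, while the 3.8 through 3.12 files leave versions floating. A typical way such a file is consumed, assuming the usual pip-driven session setup (the exact command is illustrative):

pip install -e . -c testing/constraints-3.7.txt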
+# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py new file mode 100644 index 00000000..1a75b217 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py @@ -0,0 +1,4070 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
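The test module that follows drives the generated client through construction, transport selection, and mTLS endpoint handling before exercising each RPC over the grpc, grpc_asyncio, and rest transports. One behavior the first tests pin down, shown standalone with values taken from the tests themselves:

from google.cloud.language_v1.services.language_service import LanguageServiceClient

# For *.googleapis.com hosts the client derives the mTLS endpoint by inserting
# the "mtls" label; non-Google endpoints pass through unchanged.
assert LanguageServiceClient._get_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert LanguageServiceClient._get_default_mtls_endpoint("api.example.com") == "api.example.com"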
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.language_v1.services.language_service import LanguageServiceAsyncClient +from google.cloud.language_v1.services.language_service import LanguageServiceClient +from google.cloud.language_v1.services.language_service import transports +from google.cloud.language_v1.types import language_service +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LanguageServiceClient._get_default_mtls_endpoint(None) is None + assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + +
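Outside the test harness, the service-account factories exercised above are the usual way to pin credentials explicitly; a minimal sketch with a placeholder key path:

from google.cloud import language_v1

# from_service_account_json is an alias of from_service_account_file.
client = language_v1.LanguageServiceClient.from_service_account_file(
    "path/to/service-account.json",
)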
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LanguageServiceGrpcTransport, "grpc"), + (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + +def test_language_service_client_get_transport_class(): + transport = LanguageServiceClient.get_transport_class() + available_transports = [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, + ] + assert transport in available_transports + + transport = LanguageServiceClient.get_transport_class("grpc") + assert transport == transports.LanguageServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LanguageServiceClient, LanguageServiceAsyncClient +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), +]) +def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_language_service_client_client_options_from_dict(): + with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSentimentRequest, + dict, +]) +def test_analyze_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language='language_value', + ) + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse( + language='language_value', + )) + response = await client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_sentiment_async_from_dict(): + await test_analyze_sentiment_async(request_type=dict) + + +def test_analyze_sentiment_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_sentiment_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse( + language='language_value', + )) + response = await client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_entities_async_from_dict(): + await test_analyze_entities_async(request_type=dict) + + +def test_analyze_entities_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_entities_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + response = client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + client.analyze_entity_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
+ language='language_value',
+ ))
+ response = await client.analyze_entity_sentiment(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnalyzeEntitySentimentRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
+ assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_async_from_dict():
+ await test_analyze_entity_sentiment_async(request_type=dict)
+
+
+def test_analyze_entity_sentiment_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_entity_sentiment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnalyzeEntitySentimentResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.analyze_entity_sentiment(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+
+def test_analyze_entity_sentiment_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.analyze_entity_sentiment(
+ language_service.AnalyzeEntitySentimentRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_entity_sentiment),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.analyze_entity_sentiment(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.analyze_entity_sentiment(
+ language_service.AnalyzeEntitySentimentRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.AnalyzeSyntaxRequest,
+ dict,
+])
+def test_analyze_syntax(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnalyzeSyntaxResponse(
+ language='language_value',
+ )
+ response = client.analyze_syntax(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+ assert response.language == 'language_value'
+
+
+def test_analyze_syntax_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ client.analyze_syntax()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse(
+ language='language_value',
+ ))
+ response = await client.analyze_syntax(request)
+
+ # Establish that the underlying gRPC stub method was called.
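+ # (At least one stub invocation must be recorded; the first one carries
+ # the request proto as its only positional argument.)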
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+ assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async_from_dict():
+ await test_analyze_syntax_async(request_type=dict)
+
+
+def test_analyze_syntax_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnalyzeSyntaxResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.analyze_syntax(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+
+def test_analyze_syntax_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.analyze_syntax(
+ language_service.AnalyzeSyntaxRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_syntax),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.analyze_syntax(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
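+ # (The ValueError is raised client-side, before any RPC is attempted,
+ # because `request` and the flattened fields are mutually exclusive.)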
+ with pytest.raises(ValueError):
+ await client.analyze_syntax(
+ language_service.AnalyzeSyntaxRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.ClassifyTextRequest,
+ dict,
+])
+def test_classify_text(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.ClassifyTextResponse()
+ response = client.classify_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+def test_classify_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ client.classify_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+@pytest.mark.asyncio
+async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+ response = await client.classify_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ClassifyTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_classify_text_async_from_dict():
+ await test_classify_text_async(request_type=dict)
+
+
+def test_classify_text_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
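+ # (ClassifyTextResponse only carries the repeated `categories` field,
+ # so an empty message is a sufficient stand-in here.)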
+ call.return_value = language_service.ClassifyTextResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.classify_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+
+def test_classify_text_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.classify_text(
+ language_service.ClassifyTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.classify_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.classify_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.classify_text(
+ language_service.ClassifyTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.ModerateTextRequest,
+ dict,
+])
+def test_moderate_text(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.moderate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.ModerateTextResponse()
+ response = client.moderate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
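+ # (Comparing against a default-constructed request shows the empty
+ # request was forwarded to the stub unchanged.)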
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ModerateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ModerateTextResponse)
+
+
+def test_moderate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.moderate_text),
+ '__call__') as call:
+ client.moderate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ModerateTextRequest()
+
+@pytest.mark.asyncio
+async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.moderate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
+ response = await client.moderate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.ModerateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.ModerateTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_moderate_text_async_from_dict():
+ await test_moderate_text_async(request_type=dict)
+
+
+def test_moderate_text_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.moderate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.ModerateTextResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.moderate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+
+def test_moderate_text_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.moderate_text(
+ language_service.ModerateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.moderate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.moderate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.moderate_text(
+ language_service.ModerateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.AnnotateTextRequest,
+ dict,
+])
+def test_annotate_text(request_type, transport: str = 'grpc'):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnnotateTextResponse(
+ language='language_value',
+ )
+ response = client.annotate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnnotateTextResponse)
+ assert response.language == 'language_value'
+
+
+def test_annotate_text_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
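+ # (Patching __call__ on the bound stub method intercepts the RPC at the
+ # transport layer, so no channel traffic is ever generated.)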
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ client.annotate_text()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+@pytest.mark.asyncio
+async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
+ language='language_value',
+ ))
+ response = await client.annotate_text(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == language_service.AnnotateTextRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnnotateTextResponse)
+ assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_async_from_dict():
+ await test_annotate_text_async(request_type=dict)
+
+
+def test_annotate_text_flattened():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = language_service.AnnotateTextResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.annotate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].features
+ mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+
+def test_annotate_text_flattened_error():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.annotate_text(
+ language_service.AnnotateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.annotate_text),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.annotate_text(
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].document
+ mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+ assert arg == mock_val
+ arg = args[0].features
+ mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+ assert arg == mock_val
+ arg = args[0].encoding_type
+ mock_val = language_service.EncodingType.UTF8
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.annotate_text(
+ language_service.AnnotateTextRequest(),
+ document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+ features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+ encoding_type=language_service.EncodingType.UTF8,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ language_service.AnalyzeSentimentRequest,
+ dict,
+])
+def test_analyze_sentiment_rest(request_type):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = language_service.AnalyzeSentimentResponse(
+ language='language_value',
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ response = client.analyze_sentiment(request)
+
+ # Establish that the response is the type that we expect.
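+ # (The fake HTTP body above is the response proto serialized to JSON;
+ # the client parses it back, exercising the full REST round-trip.)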
+ assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
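+ # (transcode() is stubbed so the test can supply a canned
+ # uri/method/body instead of depending on the real http_options.)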
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_sentiment(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_sentiment_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_sentiment_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
+
+ request = language_service.AnalyzeSentimentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeSentimentResponse()
+
+ client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
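+ # (A 400 status from the underlying session is surfaced to the caller
+ # as core_exceptions.BadRequest by the GAPIC error mapping.)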
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_sentiment(request) + + +def test_analyze_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeSentiment" % client.transport._host, args[1]) + + +def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
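+ # (The canned response below is served by the patched requests.Session,
+ # so the test never touches the network.)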
+ return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entities(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_entities(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_entities_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_entities._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entities_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
+
+ request = language_service.AnalyzeEntitiesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeEntitiesResponse()
+
+ client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) + + +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeEntities" % client.transport._host, args[1]) + + +def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_entity_sentiment(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_entity_sentiment_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
+
+ request = language_service.AnalyzeEntitySentimentRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeEntitySentimentResponse()
+
+ client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeEntitySentiment" % client.transport._host, args[1]) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.analyze_syntax(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_analyze_syntax_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.analyze_syntax._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_syntax_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
+
+ request = language_service.AnalyzeSyntaxRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnalyzeSyntaxResponse()
+
+ client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
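+ # google.api_core maps an HTTP 400 response to core_exceptions.BadRequest,
+ # which is the exception pytest.raises expects below.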
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_syntax(request) + + +def test_analyze_syntax_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_syntax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:analyzeSyntax" % client.transport._host, args[1]) + + +def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ClassifyTextRequest, + dict, +]) +def test_classify_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.classify_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.classify_text(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_classify_text_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.classify_text._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_classify_text_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
+
+ request = language_service.ClassifyTextRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.ClassifyTextResponse()
+
+ client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:classifyText" % client.transport._host, args[1]) + + +def test_classify_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ModerateTextRequest, + dict, +]) +def test_moderate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.moderate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.ModerateTextResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.moderate_text(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_moderate_text_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.moderate_text._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_moderate_text_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse())
+
+ request = language_service.ModerateTextRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.ModerateTextResponse()
+
+ client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.moderate_text(request) + + +def test_moderate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.moderate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:moderateText" % client.transport._host, args[1]) + + +def test_moderate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_moderate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnnotateTextRequest, + dict, +]) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == 'language_value' + + +def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.annotate_text(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_annotate_text_rest_unset_required_fields():
+ transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.annotate_text._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("document", "features", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_text_rest_interceptors(null_interceptor):
+ transport = transports.LanguageServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+ )
+ client = LanguageServiceClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
+ mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
+
+ request = language_service.AnnotateTextRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = language_service.AnnotateTextResponse()
+
+ client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/documents:annotateText" % client.transport._host, args[1]) + + +def test_annotate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
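+ # (The base class only declares the method surface; the concrete gRPC and
+ # REST transports are the ones expected to override each of these stubs.)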
+ methods = ( + 'analyze_sentiment', + 'analyze_entities', + 'analyze_entity_sentiment', + 'analyze_syntax', + 'classify_text', + 'moderate_text', + 'annotate_text', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_language_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport() + adc.assert_called_once() + + +def test_language_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LanguageServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_language_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
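+ # (ADC means Application Default Credentials, resolved from the environment
+ # by google.auth.default(); the mock below stands in for that lookup.)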
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, + ], +) +def test_language_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LanguageServiceGrpcTransport, grpc_helpers), + (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_language_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
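+ # (client_cert_source_for_mtls is a callback returning (cert_bytes, key_bytes);
+ # the transport should feed that pair to grpc.ssl_channel_credentials, as
+ # asserted below.)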
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+def test_language_service_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+ transports.LanguageServiceRestTransport(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_language_service_host_no_port(transport_name):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'language.googleapis.com:443'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://language.googleapis.com'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_language_service_host_with_port(transport_name):
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'language.googleapis.com:8000'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://language.googleapis.com:8000'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "rest",
+])
+def test_language_service_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = LanguageServiceClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = LanguageServiceClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.analyze_sentiment._session
+ session2 = client2.transport.analyze_sentiment._session
+ assert session1 != session2
+ session1 = client1.transport.analyze_entities._session
+ session2 = client2.transport.analyze_entities._session
+ assert session1 != session2
+ session1 = client1.transport.analyze_entity_sentiment._session
+ session2 = client2.transport.analyze_entity_sentiment._session
+ assert session1 != session2
+ session1 = client1.transport.analyze_syntax._session
+ session2 = client2.transport.analyze_syntax._session
+ assert session1 != session2
+ session1 = client1.transport.classify_text._session
+ session2 = client2.transport.classify_text._session
+ assert session1 != session2
+ session1 = client1.transport.moderate_text._session
+ session2 = client2.transport.moderate_text._session
+ assert session1 != session2
+ session1 = client1.transport.annotate_text._session
+ session2 = client2.transport.annotate_text._session
+ assert session1 != session2
+
+def test_language_service_grpc_transport_channel():
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
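+ # (A caller-supplied channel should be adopted as-is, so the transport
+ # records no ssl credentials of its own.)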
+ transport = transports.LanguageServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_language_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.LanguageServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
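+# (The non-deprecated equivalents, ssl_channel_credentials and
+# client_cert_source_for_mtls, are exercised by the tests above.)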
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LanguageServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LanguageServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LanguageServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LanguageServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LanguageServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LanguageServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LanguageServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LanguageServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LanguageServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LanguageServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LanguageServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = LanguageServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta2/.coveragerc b/owl-bot-staging/v1beta2/.coveragerc new file mode 100644 index 00000000..c1f51536 --- /dev/null +++ b/owl-bot-staging/v1beta2/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/language/__init__.py + google/cloud/language/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1beta2/.flake8 b/owl-bot-staging/v1beta2/.flake8 new file mode 100644 index 00000000..29227d4c --- /dev/null +++ b/owl-bot-staging/v1beta2/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1beta2/MANIFEST.in b/owl-bot-staging/v1beta2/MANIFEST.in new file mode 100644 index 00000000..dcc097e7 --- /dev/null +++ b/owl-bot-staging/v1beta2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/language *.py +recursive-include google/cloud/language_v1beta2 *.py diff --git a/owl-bot-staging/v1beta2/README.rst b/owl-bot-staging/v1beta2/README.rst new file mode 100644 index 00000000..0c5f1b6b --- /dev/null +++ b/owl-bot-staging/v1beta2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Language API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Language API. +4. 
`Set up Authentication.`_

+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+ python3 -m venv <your-env>
+ source <your-env>/bin/activate
+ <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+ python3 -m venv <your-env>
+ <your-env>\Scripts\activate
+ <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1beta2/docs/conf.py b/owl-bot-staging/v1beta2/docs/conf.py
new file mode 100644
index 00000000..2e1b322d
--- /dev/null
+++ b/owl-bot-staging/v1beta2/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-language documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "4.0.1"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.coverage",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffixes as a list of strings:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-language"
+copyright = u"2022, Google, LLC"
+author = u"Google APIs"  # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for Python",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-language-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warnings, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+ # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-language.tex", + u"google-cloud-language Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-language", + u"Google Cloud Language Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-language", + u"google-cloud-language Documentation", + author, + "google-cloud-language", + "GAPIC library for Google Cloud Language API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta2/docs/index.rst b/owl-bot-staging/v1beta2/docs/index.rst new file mode 100644 index 00000000..42b8e680 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1beta2/services + language_v1beta2/types diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst new file mode 100644 index 00000000..799a7892 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst @@ -0,0 +1,6 @@ +LanguageService +--------------------------------- + +.. automodule:: google.cloud.language_v1beta2.services.language_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst new file mode 100644 index 00000000..40ead585 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Language v1beta2 API +============================================== +.. toctree:: + :maxdepth: 2 + + language_service diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst new file mode 100644 index 00000000..2e834e61 --- /dev/null +++ b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Language v1beta2 API +=========================================== + +.. automodule:: google.cloud.language_v1beta2.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py new file mode 100644 index 00000000..6bfa0911 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.language import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.language_v1beta2.services.language_service.client import LanguageServiceClient +from google.cloud.language_v1beta2.services.language_service.async_client import LanguageServiceAsyncClient + +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentResponse +from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxRequest +from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxResponse +from google.cloud.language_v1beta2.types.language_service import AnnotateTextRequest +from google.cloud.language_v1beta2.types.language_service import AnnotateTextResponse +from google.cloud.language_v1beta2.types.language_service import ClassificationCategory +from google.cloud.language_v1beta2.types.language_service import ClassificationModelOptions +from google.cloud.language_v1beta2.types.language_service import ClassifyTextRequest +from google.cloud.language_v1beta2.types.language_service import ClassifyTextResponse +from google.cloud.language_v1beta2.types.language_service import DependencyEdge +from google.cloud.language_v1beta2.types.language_service import Document +from google.cloud.language_v1beta2.types.language_service import Entity +from google.cloud.language_v1beta2.types.language_service import EntityMention +from google.cloud.language_v1beta2.types.language_service import ModerateTextRequest +from google.cloud.language_v1beta2.types.language_service import ModerateTextResponse +from google.cloud.language_v1beta2.types.language_service import PartOfSpeech +from google.cloud.language_v1beta2.types.language_service import Sentence +from google.cloud.language_v1beta2.types.language_service import Sentiment +from google.cloud.language_v1beta2.types.language_service import TextSpan +from google.cloud.language_v1beta2.types.language_service import Token +from google.cloud.language_v1beta2.types.language_service import EncodingType + +__all__ = ('LanguageServiceClient', + 'LanguageServiceAsyncClient', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ 
b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language/py.typed b/owl-bot-staging/v1beta2/google/cloud/language/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py new file mode 100644 index 00000000..e6a87024 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.language_v1beta2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.language_service import LanguageServiceClient +from .services.language_service import LanguageServiceAsyncClient + +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassificationModelOptions +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import ModerateTextRequest +from .types.language_service import ModerateTextResponse +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token +from .types.language_service import EncodingType + +__all__ = ( + 'LanguageServiceAsyncClient', +'AnalyzeEntitiesRequest', +'AnalyzeEntitiesResponse', +'AnalyzeEntitySentimentRequest', +'AnalyzeEntitySentimentResponse', +'AnalyzeSentimentRequest', +'AnalyzeSentimentResponse', +'AnalyzeSyntaxRequest', +'AnalyzeSyntaxResponse', +'AnnotateTextRequest', +'AnnotateTextResponse', +'ClassificationCategory', +'ClassificationModelOptions', +'ClassifyTextRequest', +'ClassifyTextResponse', +'DependencyEdge', +'Document', +'EncodingType', +'Entity', +'EntityMention', +'LanguageServiceClient', +'ModerateTextRequest', +'ModerateTextResponse', +'PartOfSpeech', +'Sentence', +'Sentiment', +'TextSpan', +'Token', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json new file mode 100644 index 00000000..85a901f9 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.language_v1beta2", + "protoPackage": "google.cloud.language.v1beta2", + "schema": "1.0", + "services": { + "LanguageService": { + "clients": { + "grpc": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "grpc-async": { + 
"libraryClient": "LanguageServiceAsyncClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py new file mode 100644 index 00000000..405b1ceb --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py new file mode 100644 index 00000000..e8e1c384 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py new file mode 100644 index 00000000..6e5f9052 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient + +__all__ = ( + 'LanguageServiceClient', + 'LanguageServiceAsyncClient', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py new file mode 100644 index 00000000..a3a46b78 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -0,0 +1,963 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.language_v1beta2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. 
+ """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) + common_project_path = staticmethod(LanguageServiceClient.common_project_path) + parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) + common_location_path = staticmethod(LanguageServiceClient.common_location_path) + parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]]): + The request object. The sentiment analysis request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]]): + The request object. The entity analysis request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entities, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]]): + The request object. The entity-level sentiment analysis + request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entity_sentiment, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]]): + The request object. The syntax analysis request message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_syntax, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]]): + The request object. The document classification request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.language_v1beta2.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.classify_text, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]]): + The request object. The document moderation request + message. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = language_service.ModerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.moderate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + async def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (:class:`google.cloud.language_v1beta2.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`google.cloud.language_v1beta2.types.AnnotateTextRequest.Features`): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.language_v1beta2.types.AnnotateTextResponse:
+                The text annotations response
+                message.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([document, features, encoding_type])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = language_service.AnnotateTextRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if document is not None:
+            request.document = document
+        if features is not None:
+            request.features = features
+        if encoding_type is not None:
+            request.encoding_type = encoding_type
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.annotate_text,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=600.0,
+            ),
+            default_timeout=600.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        await self.transport.close()
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "LanguageServiceAsyncClient",
+)
diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py
new file mode 100644
index 00000000..9093d5b6
--- /dev/null
+++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py
@@ -0,0 +1,1116 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+
+from google.cloud.language_v1beta2 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.language_v1beta2.types import language_service
+from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LanguageServiceGrpcTransport
+from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
+from .transports.rest import LanguageServiceRestTransport
+
+
+class LanguageServiceClientMeta(type):
+    """Metaclass for the LanguageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
+    _transport_registry["grpc"] = LanguageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = LanguageServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[LanguageServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "language.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LanguageServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LanguageServiceTransport: The transport used by the client + instance. 
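+
+        Example (illustrative; the concrete class depends on the transport
+        chosen at construction time, gRPC by default)::
+
+            client = LanguageServiceClient()
+            channel_transport = client.transport  # e.g. LanguageServiceGrpcTransport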
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LanguageServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def analyze_sentiment(self, + request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]): + The request object. 
The sentiment analysis request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_entities(self, + request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]): + The request object. The entity analysis request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def analyze_entity_sentiment(self, + request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]): + The request object. The entity-level sentiment analysis + request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_syntax(self, + request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]): + The request object. The syntax analysis request message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def classify_text(self, + request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]): + The request object. The document classification request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
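+        # (A plain dict passed as ``request`` is coerced into a
+        # ClassifyTextRequest below; a message that is already the right type
+        # is used as-is rather than copied.)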
+ if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def moderate_text(self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]): + The request object. The document moderation request + message. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ModerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ModerateTextRequest): + request = language_service.ModerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.moderate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def annotate_text(self, + request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + features: Optional[language_service.AnnotateTextRequest.Features] = None, + encoding_type: Optional[language_service.EncodingType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1beta2 + + def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1beta2.types.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
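+        # (Unset flattened arguments default to None, which ``any`` treats as
+        # falsy, so only arguments that were actually passed count here.)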
+ has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LanguageServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LanguageServiceClient", +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py new file mode 100644 index 00000000..3cb6ab92 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestTransport +from .rest import LanguageServiceRestInterceptor + + +# Compile a registry of transports. 
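+# Keys are the labels a caller may pass as the client's ``transport=``
+# argument ("grpc", "grpc_asyncio", "rest"); values are the classes that
+# implement them.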
+_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] +_transport_registry['grpc'] = LanguageServiceGrpcTransport +_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport +_transport_registry['rest'] = LanguageServiceRestTransport + +__all__ = ( + 'LanguageServiceTransport', + 'LanguageServiceGrpcTransport', + 'LanguageServiceGrpcAsyncIOTransport', + 'LanguageServiceRestTransport', + 'LanguageServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py new file mode 100644 index 00000000..99ee1db2 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.language_v1beta2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1beta2.types import language_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LanguageServiceTransport(abc.ABC): + """Abstract transport class for LanguageService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'language.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience when the user passed a credentials file above.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
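+        # Each entry pairs an RPC with its default policy: most methods retry
+        # DeadlineExceeded and ServiceUnavailable with exponential backoff
+        # (0.1s initial delay, 60s cap, 1.3x multiplier) under a 600s
+        # deadline; moderate_text has no default retry or timeout.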
+        self._wrapped_methods = {
+            self.analyze_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entities: gapic_v1.method.wrap_method(
+                self.analyze_entities,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_entity_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_syntax: gapic_v1.method.wrap_method(
+                self.analyze_syntax,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.classify_text: gapic_v1.method.wrap_method(
+                self.classify_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.moderate_text: gapic_v1.method.wrap_method(
+                self.moderate_text,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.annotate_text: gapic_v1.method.wrap_method(
+                self.annotate_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=600.0,
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
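+
+        Example (an illustrative sketch; ``creds`` stands in for any
+        ``google.auth`` credentials object, and the concrete transport
+        implements ``close``)::
+
+            transport = LanguageServiceGrpcTransport(credentials=creds)
+            ...
+            transport.close()  # releases the underlying channel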
+ """ + raise NotImplementedError() + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Union[ + language_service.AnalyzeSentimentResponse, + Awaitable[language_service.AnalyzeSentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Union[ + language_service.AnalyzeEntitiesResponse, + Awaitable[language_service.AnalyzeEntitiesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Union[ + language_service.AnalyzeEntitySentimentResponse, + Awaitable[language_service.AnalyzeEntitySentimentResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Union[ + language_service.AnalyzeSyntaxResponse, + Awaitable[language_service.AnalyzeSyntaxResponse] + ]]: + raise NotImplementedError() + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Union[ + language_service.ClassifyTextResponse, + Awaitable[language_service.ClassifyTextResponse] + ]]: + raise NotImplementedError() + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Union[ + language_service.ModerateTextResponse, + Awaitable[language_service.ModerateTextResponse] + ]]: + raise NotImplementedError() + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Union[ + language_service.AnnotateTextResponse, + Awaitable[language_service.AnnotateTextResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'LanguageServiceTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py new file mode 100644 index 00000000..48b7cd8b --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -0,0 +1,432 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. 
+
+    Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'language.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[grpc.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'language.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + ~.AnalyzeSentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
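+        # ``unary_unary`` (used below) creates a callable for a
+        # unary-request/unary-response RPC. Its first argument is the
+        # fully-qualified method path (``/package.Service/Method``) under
+        # which the server registered the handler; the serializer and
+        # deserializer convert proto-plus messages to and from wire bytes.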
+ if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + ~.ModerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
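+        # The callable is created once and cached in ``self._stubs``; later
+        # reads of this property return the same object. A minimal usage
+        # sketch (illustrative only, assuming application default
+        # credentials are available):
+        #
+        #   transport = LanguageServiceGrpcTransport()
+        #   request = language_service.ModerateTextRequest(
+        #       document=language_service.Document(
+        #           content="text to check",
+        #           type_=language_service.Document.Type.PLAIN_TEXT,
+        #       ),
+        #   )
+        #   response = transport.moderate_text(request)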
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'LanguageServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..710e8bb5 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.language_v1beta2.types import language_service +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import LanguageServiceGrpcTransport + + +class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): + """gRPC AsyncIO backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'language.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'language.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Awaitable[language_service.AnalyzeSentimentResponse]]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + Awaitable[~.AnalyzeSentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_sentiment' not in self._stubs: + self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs['analyze_sentiment'] + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Awaitable[language_service.AnalyzeEntitiesResponse]]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + Awaitable[~.AnalyzeEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entities' not in self._stubs: + self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs['analyze_entities'] + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Awaitable[language_service.AnalyzeEntitySentimentResponse]]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + Awaitable[~.AnalyzeEntitySentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_entity_sentiment' not in self._stubs: + self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs['analyze_entity_sentiment'] + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse]]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_syntax' not in self._stubs: + self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs['analyze_syntax'] + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse]]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'classify_text' not in self._stubs: + self._stubs['classify_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ClassifyText', + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs['classify_text'] + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + Awaitable[language_service.ModerateTextResponse]]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + Awaitable[~.ModerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
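+        # Unlike its sync counterpart, this callable returns an awaitable.
+        # A minimal sketch (illustrative only, assuming application default
+        # credentials are available):
+        #
+        #   async def moderate(text: str):
+        #       transport = LanguageServiceGrpcAsyncIOTransport()
+        #       request = language_service.ModerateTextRequest(
+        #           document=language_service.Document(
+        #               content=text,
+        #               type_=language_service.Document.Type.PLAIN_TEXT,
+        #           ),
+        #       )
+        #       return await transport.moderate_text(request)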
+ if 'moderate_text' not in self._stubs: + self._stubs['moderate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/ModerateText', + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs['moderate_text'] + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse]]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'annotate_text' not in self._stubs: + self._stubs['annotate_text'] = self.grpc_channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs['annotate_text'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'LanguageServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py new file mode 100644 index 00000000..9696c821 --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py @@ -0,0 +1,1029 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.language_v1beta2.types import language_service + +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LanguageServiceRestInterceptor: + """Interceptor for LanguageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LanguageServiceRestTransport. + + .. 
code-block:: python + class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): + def pre_analyze_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_entity_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entity_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_syntax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_syntax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_annotate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_classify_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_classify_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_moderate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_moderate_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) + client = LanguageServiceClient(transport=transport) + + + """ + def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. 
+ """ + return response + def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for moderate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. 
+ """ + return request, metadata + + def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: + """Post-rpc interceptor for moderate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'language.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LanguageServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AnalyzeEntities(LanguageServiceRestStub):
+        def __hash__(self):
+            return hash("AnalyzeEntities")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: language_service.AnalyzeEntitiesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> language_service.AnalyzeEntitiesResponse:
+            r"""Call the analyze entities method over HTTP.
+
+            Args:
+                request (~.language_service.AnalyzeEntitiesRequest):
+                    The request object. The entity analysis request message.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.language_service.AnalyzeEntitiesResponse:
+                    The entity analysis response message.
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1beta2/documents:analyzeEntities',
+                'body': '*',
+            },
+            ]
+            request, metadata = self._interceptor.pre_analyze_entities(request, metadata)
+            pb_request = language_service.AnalyzeEntitiesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
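+            # ``from_http_response`` maps the HTTP status to the matching
+            # exception type, e.g. ``google.api_core.exceptions.BadRequest``
+            # for 400 or ``google.api_core.exceptions.NotFound`` for 404.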
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeEntitySentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeEntitySentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSentimentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeSentiment', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnalyzeSyntaxRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:analyzeSyntax', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.AnnotateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:annotateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ClassifyTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:classifyText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + class _ModerateText(LanguageServiceRestStub): + def __hash__(self): + return hash("ModerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: language_service.ModerateTextRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> language_service.ModerateTextResponse: + r"""Call the moderate text method over HTTP. + + Args: + request (~.language_service.ModerateTextRequest): + The request object. The document moderation request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ModerateTextResponse: + The document moderation response + message. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta2/documents:moderateText', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_moderate_text(request, metadata) + pb_request = language_service.ModerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ModerateTextResponse() + pb_resp = language_service.ModerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_moderate_text(resp) + return resp + + @property + def analyze_entities(self) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment(self) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment(self) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax(self) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + language_service.AnalyzeSyntaxResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text(self) -> Callable[ + [language_service.AnnotateTextRequest], + language_service.AnnotateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text(self) -> Callable[ + [language_service.ClassifyTextRequest], + language_service.ClassifyTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def moderate_text(self) -> Callable[ + [language_service.ModerateTextRequest], + language_service.ModerateTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LanguageServiceRestTransport', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py new file mode 100644 index 00000000..8dadfa8a --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
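+#
+# (A minimal construction sketch, assuming the built package is installed;
+# these types are all re-exported from ``google.cloud.language_v1beta2``:
+#
+#     from google.cloud import language_v1beta2
+#
+#     doc = language_v1beta2.Document(
+#         content="Hello, world!",
+#         type_=language_v1beta2.Document.Type.PLAIN_TEXT,
+#     )
+#     req = language_v1beta2.ModerateTextRequest(document=doc)
+# )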
+# +from .language_service import ( + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + AnnotateTextRequest, + AnnotateTextResponse, + ClassificationCategory, + ClassificationModelOptions, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + ModerateTextRequest, + ModerateTextResponse, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, + EncodingType, +) + +__all__ = ( + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'DependencyEdge', + 'Document', + 'Entity', + 'EntityMention', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'PartOfSpeech', + 'Sentence', + 'Sentiment', + 'TextSpan', + 'Token', + 'EncodingType', +) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py new file mode 100644 index 00000000..3b27605f --- /dev/null +++ b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py @@ -0,0 +1,1761 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.language.v1beta2', + manifest={ + 'EncodingType', + 'Document', + 'Sentence', + 'Entity', + 'Token', + 'Sentiment', + 'PartOfSpeech', + 'DependencyEdge', + 'EntityMention', + 'TextSpan', + 'ClassificationCategory', + 'ClassificationModelOptions', + 'AnalyzeSentimentRequest', + 'AnalyzeSentimentResponse', + 'AnalyzeEntitySentimentRequest', + 'AnalyzeEntitySentimentResponse', + 'AnalyzeEntitiesRequest', + 'AnalyzeEntitiesResponse', + 'AnalyzeSyntaxRequest', + 'AnalyzeSyntaxResponse', + 'ClassifyTextRequest', + 'ClassifyTextResponse', + 'ModerateTextRequest', + 'ModerateTextResponse', + 'AnnotateTextRequest', + 'AnnotateTextResponse', + }, +) + + +class EncodingType(proto.Enum): + r"""Represents the text encoding that the caller uses to process the + output. Providing an ``EncodingType`` is recommended because the API + provides the beginning offsets for various outputs, such as tokens + and mentions, and languages that natively use different text + encodings may access offsets differently. + + Values: + NONE (0): + If ``EncodingType`` is not specified, encoding-dependent + information (such as ``begin_offset``) will be set at + ``-1``. 
+        UTF8 (1):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-8 encoding of the input. C++ and
+            Go are examples of languages that use this encoding
+            natively.
+        UTF16 (2):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-16 encoding of the input. Java
+            and JavaScript are examples of languages that use this
+            encoding natively.
+        UTF32 (3):
+            Encoding-dependent information (such as ``begin_offset``) is
+            calculated based on the UTF-32 encoding of the input. Python
+            is an example of a language that uses this encoding
+            natively.
+    """
+    NONE = 0
+    UTF8 = 1
+    UTF16 = 2
+    UTF32 = 3
+
+
+class Document(proto.Message):
+    r"""Represents the input to API methods.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        type_ (google.cloud.language_v1beta2.types.Document.Type):
+            Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+            returns an ``INVALID_ARGUMENT`` error.
+        content (str):
+            The content of the input in string format.
+            Cloud audit logging exempt since it is based on
+            user data.
+
+            This field is a member of `oneof`_ ``source``.
+        gcs_content_uri (str):
+            The Google Cloud Storage URI where the file content is
+            located. This URI must be of the form:
+            gs://bucket_name/object_name. For more details, see
+            https://cloud.google.com/storage/docs/reference-uris. NOTE:
+            Cloud Storage object versioning is not supported.
+
+            This field is a member of `oneof`_ ``source``.
+        language (str):
+            The language of the document (if not specified, the language
+            is automatically detected). Both ISO and BCP-47 language
+            codes are accepted. `Language Support
+            <https://cloud.google.com/natural-language/docs/languages>`__
+            lists currently supported languages for each API method. If
+            the language (either specified by the caller or
+            automatically detected) is not supported by the called API
+            method, an ``INVALID_ARGUMENT`` error is returned.
+        reference_web_uri (str):
+            The web URI where the document comes from.
+            This URI is not used for fetching the content,
+            but as a hint for analyzing the document.
+        boilerplate_handling (google.cloud.language_v1beta2.types.Document.BoilerplateHandling):
+            Indicates how detected boilerplate (e.g.
+            advertisements, copyright declarations, banners)
+            should be handled for this document. If not
+            specified, boilerplate will be treated the same
+            as content.
+    """
+    class Type(proto.Enum):
+        r"""The document types enum.
+
+        Values:
+            TYPE_UNSPECIFIED (0):
+                The content type is not specified.
+            PLAIN_TEXT (1):
+                Plain text
+            HTML (2):
+                HTML
+        """
+        TYPE_UNSPECIFIED = 0
+        PLAIN_TEXT = 1
+        HTML = 2
+
+    class BoilerplateHandling(proto.Enum):
+        r"""Ways of handling boilerplate detected in the document.
+
+        Values:
+            BOILERPLATE_HANDLING_UNSPECIFIED (0):
+                The boilerplate handling is not specified.
+            SKIP_BOILERPLATE (1):
+                Do not analyze detected boilerplate.
+                Reference web URI is required for detecting
+                boilerplate.
+            KEEP_BOILERPLATE (2):
+                Treat boilerplate the same as content.
+ """ + BOILERPLATE_HANDLING_UNSPECIFIED = 0 + SKIP_BOILERPLATE = 1 + KEEP_BOILERPLATE = 2 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + content: str = proto.Field( + proto.STRING, + number=2, + oneof='source', + ) + gcs_content_uri: str = proto.Field( + proto.STRING, + number=3, + oneof='source', + ) + language: str = proto.Field( + proto.STRING, + number=4, + ) + reference_web_uri: str = proto.Field( + proto.STRING, + number=5, + ) + boilerplate_handling: BoilerplateHandling = proto.Field( + proto.ENUM, + number=6, + enum=BoilerplateHandling, + ) + + +class Sentence(proto.Message): + r"""Represents a sentence in the input document. + + Attributes: + text (google.cloud.language_v1beta2.types.TextSpan): + The sentence text. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeSentiment][] or if + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] + is set to true, this field will contain the sentiment for + the sentence. + """ + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=2, + message='Sentiment', + ) + + +class Entity(proto.Message): + r"""Represents a phrase in the text that is a known entity, such + as a person, an organization, or location. The API associates + information, such as salience and mentions, with entities. + + Attributes: + name (str): + The representative name for the entity. + type_ (google.cloud.language_v1beta2.types.Entity.Type): + The entity type. + metadata (MutableMapping[str, str]): + Metadata associated with the entity. + + For most entity types, the metadata is a Wikipedia URL + (``wikipedia_url``) and Knowledge Graph MID (``mid``), if + they are available. For the metadata associated with other + entity types, see the Type table below. + salience (float): + The salience score associated with the entity in the [0, + 1.0] range. + + The salience score for an entity provides information about + the importance or centrality of that entity to the entire + document text. Scores closer to 0 are less salient, while + scores closer to 1.0 are highly salient. + mentions (MutableSequence[google.cloud.language_v1beta2.types.EntityMention]): + The mentions of this entity in the input + document. The API currently supports proper noun + mentions. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided + document. + """ + class Type(proto.Enum): + r"""The type of the entity. For most entity types, the associated + metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph + MID (``mid``). The table below lists the associated fields for + entities that have different metadata. 
+
+        Values:
+            UNKNOWN (0):
+                Unknown
+            PERSON (1):
+                Person
+            LOCATION (2):
+                Location
+            ORGANIZATION (3):
+                Organization
+            EVENT (4):
+                Event
+            WORK_OF_ART (5):
+                Artwork
+            CONSUMER_GOOD (6):
+                Consumer product
+            OTHER (7):
+                Other types of entities
+            PHONE_NUMBER (9):
+                Phone number
+
+                The metadata lists the phone number, formatted according to
+                local convention, plus whichever additional elements appear
+                in the text:
+
+                -  ``number`` - the actual number, broken down into sections
+                   as per local convention
+                -  ``national_prefix`` - country code, if detected
+                -  ``area_code`` - region or area code, if detected
+                -  ``extension`` - phone extension (to be dialed after
+                   connection), if detected
+            ADDRESS (10):
+                Address
+
+                The metadata identifies the street number and locality plus
+                whichever additional elements appear in the text:
+
+                -  ``street_number`` - street number
+                -  ``locality`` - city or town
+                -  ``street_name`` - street/route name, if detected
+                -  ``postal_code`` - postal code, if detected
+                -  ``country`` - country, if detected
+                -  ``broad_region`` - administrative area, such as the
+                   state, if detected
+                -  ``narrow_region`` - smaller administrative area, such as
+                   county, if detected
+                -  ``sublocality`` - used in Asian addresses to demark a
+                   district within a city, if detected
+            DATE (11):
+                Date
+
+                The metadata identifies the components of the date:
+
+                -  ``year`` - four digit year, if detected
+                -  ``month`` - two digit month number, if detected
+                -  ``day`` - two digit day number, if detected
+            NUMBER (12):
+                Number
+
+                The metadata is the number itself.
+            PRICE (13):
+                Price
+
+                The metadata identifies the ``value`` and ``currency``.
+        """
+        UNKNOWN = 0
+        PERSON = 1
+        LOCATION = 2
+        ORGANIZATION = 3
+        EVENT = 4
+        WORK_OF_ART = 5
+        CONSUMER_GOOD = 6
+        OTHER = 7
+        PHONE_NUMBER = 9
+        ADDRESS = 10
+        DATE = 11
+        NUMBER = 12
+        PRICE = 13
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    type_: Type = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Type,
+    )
+    metadata: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    salience: float = proto.Field(
+        proto.FLOAT,
+        number=4,
+    )
+    mentions: MutableSequence['EntityMention'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message='EntityMention',
+    )
+    sentiment: 'Sentiment' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='Sentiment',
+    )
+
+
+class Token(proto.Message):
+    r"""Represents the smallest syntactic building block of the text.
+
+    Attributes:
+        text (google.cloud.language_v1beta2.types.TextSpan):
+            The token text.
+        part_of_speech (google.cloud.language_v1beta2.types.PartOfSpeech):
+            Part of speech tag for this token.
+        dependency_edge (google.cloud.language_v1beta2.types.DependencyEdge):
+            Dependency tree parse for this token.
+        lemma (str):
+            `Lemma
+            <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__
+            of the token.
+    """
+
+    text: 'TextSpan' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='TextSpan',
+    )
+    part_of_speech: 'PartOfSpeech' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='PartOfSpeech',
+    )
+    dependency_edge: 'DependencyEdge' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='DependencyEdge',
+    )
+    lemma: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class Sentiment(proto.Message):
+    r"""Represents the feeling associated with the entire text or
+    entities in the text.
+ Next ID: 6 + + Attributes: + magnitude (float): + A non-negative number in the [0, +inf) range, which + represents the absolute magnitude of sentiment regardless of + score (positive or negative). + score (float): + Sentiment score between -1.0 (negative + sentiment) and 1.0 (positive sentiment). + """ + + magnitude: float = proto.Field( + proto.FLOAT, + number=2, + ) + score: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class PartOfSpeech(proto.Message): + r"""Represents part of speech information for a token. + + Attributes: + tag (google.cloud.language_v1beta2.types.PartOfSpeech.Tag): + The part of speech tag. + aspect (google.cloud.language_v1beta2.types.PartOfSpeech.Aspect): + The grammatical aspect. + case (google.cloud.language_v1beta2.types.PartOfSpeech.Case): + The grammatical case. + form (google.cloud.language_v1beta2.types.PartOfSpeech.Form): + The grammatical form. + gender (google.cloud.language_v1beta2.types.PartOfSpeech.Gender): + The grammatical gender. + mood (google.cloud.language_v1beta2.types.PartOfSpeech.Mood): + The grammatical mood. + number (google.cloud.language_v1beta2.types.PartOfSpeech.Number): + The grammatical number. + person (google.cloud.language_v1beta2.types.PartOfSpeech.Person): + The grammatical person. + proper (google.cloud.language_v1beta2.types.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (google.cloud.language_v1beta2.types.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (google.cloud.language_v1beta2.types.PartOfSpeech.Tense): + The grammatical tense. + voice (google.cloud.language_v1beta2.types.PartOfSpeech.Voice): + The grammatical voice. + """ + class Tag(proto.Enum): + r"""The part of speech tags enum. + + Values: + UNKNOWN (0): + Unknown + ADJ (1): + Adjective + ADP (2): + Adposition (preposition and postposition) + ADV (3): + Adverb + CONJ (4): + Conjunction + DET (5): + Determiner + NOUN (6): + Noun (common and proper) + NUM (7): + Cardinal number + PRON (8): + Pronoun + PRT (9): + Particle or other function word + PUNCT (10): + Punctuation + VERB (11): + Verb (all tenses and modes) + X (12): + Other: foreign words, typos, abbreviations + AFFIX (13): + Affix + """ + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(proto.Enum): + r"""The characteristic of a verb that expresses time flow during + an event. + + Values: + ASPECT_UNKNOWN (0): + Aspect is not applicable in the analyzed + language or is not predicted. + PERFECTIVE (1): + Perfective + IMPERFECTIVE (2): + Imperfective + PROGRESSIVE (3): + Progressive + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(proto.Enum): + r"""The grammatical function performed by a noun or pronoun in a + phrase, clause, or sentence. In some languages, other parts of + speech, such as adjective and determiner, take case inflection + in agreement with the noun. + + Values: + CASE_UNKNOWN (0): + Case is not applicable in the analyzed + language or is not predicted. 
+ ACCUSATIVE (1): + Accusative + ADVERBIAL (2): + Adverbial + COMPLEMENTIVE (3): + Complementive + DATIVE (4): + Dative + GENITIVE (5): + Genitive + INSTRUMENTAL (6): + Instrumental + LOCATIVE (7): + Locative + NOMINATIVE (8): + Nominative + OBLIQUE (9): + Oblique + PARTITIVE (10): + Partitive + PREPOSITIONAL (11): + Prepositional + REFLEXIVE_CASE (12): + Reflexive + RELATIVE_CASE (13): + Relative + VOCATIVE (14): + Vocative + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(proto.Enum): + r"""Depending on the language, Form can be categorizing different + forms of verbs, adjectives, adverbs, etc. For example, + categorizing inflected endings of verbs and adjectives or + distinguishing between short and long forms of adjectives and + participles + + Values: + FORM_UNKNOWN (0): + Form is not applicable in the analyzed + language or is not predicted. + ADNOMIAL (1): + Adnomial + AUXILIARY (2): + Auxiliary + COMPLEMENTIZER (3): + Complementizer + FINAL_ENDING (4): + Final ending + GERUND (5): + Gerund + REALIS (6): + Realis + IRREALIS (7): + Irrealis + SHORT (8): + Short form + LONG (9): + Long form + ORDER (10): + Order form + SPECIFIC (11): + Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(proto.Enum): + r"""Gender classes of nouns reflected in the behaviour of + associated words. + + Values: + GENDER_UNKNOWN (0): + Gender is not applicable in the analyzed + language or is not predicted. + FEMININE (1): + Feminine + MASCULINE (2): + Masculine + NEUTER (3): + Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(proto.Enum): + r"""The grammatical feature of verbs, used for showing modality + and attitude. + + Values: + MOOD_UNKNOWN (0): + Mood is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_MOOD (1): + Conditional + IMPERATIVE (2): + Imperative + INDICATIVE (3): + Indicative + INTERROGATIVE (4): + Interrogative + JUSSIVE (5): + Jussive + SUBJUNCTIVE (6): + Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(proto.Enum): + r"""Count distinctions. + + Values: + NUMBER_UNKNOWN (0): + Number is not applicable in the analyzed + language or is not predicted. + SINGULAR (1): + Singular + PLURAL (2): + Plural + DUAL (3): + Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(proto.Enum): + r"""The distinction between the speaker, second person, third + person, etc. + + Values: + PERSON_UNKNOWN (0): + Person is not applicable in the analyzed + language or is not predicted. + FIRST (1): + First + SECOND (2): + Second + THIRD (3): + Third + REFLEXIVE_PERSON (4): + Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(proto.Enum): + r"""This category shows if the token is part of a proper name. + + Values: + PROPER_UNKNOWN (0): + Proper is not applicable in the analyzed + language or is not predicted. 
+ PROPER (1): + Proper + NOT_PROPER (2): + Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(proto.Enum): + r"""Reciprocal features of a pronoun. + + Values: + RECIPROCITY_UNKNOWN (0): + Reciprocity is not applicable in the analyzed + language or is not predicted. + RECIPROCAL (1): + Reciprocal + NON_RECIPROCAL (2): + Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(proto.Enum): + r"""Time reference. + + Values: + TENSE_UNKNOWN (0): + Tense is not applicable in the analyzed + language or is not predicted. + CONDITIONAL_TENSE (1): + Conditional + FUTURE (2): + Future + PAST (3): + Past + PRESENT (4): + Present + IMPERFECT (5): + Imperfect + PLUPERFECT (6): + Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(proto.Enum): + r"""The relationship between the action that a verb expresses and + the participants identified by its arguments. + + Values: + VOICE_UNKNOWN (0): + Voice is not applicable in the analyzed + language or is not predicted. + ACTIVE (1): + Active + CAUSATIVE (2): + Causative + PASSIVE (3): + Passive + """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + tag: Tag = proto.Field( + proto.ENUM, + number=1, + enum=Tag, + ) + aspect: Aspect = proto.Field( + proto.ENUM, + number=2, + enum=Aspect, + ) + case: Case = proto.Field( + proto.ENUM, + number=3, + enum=Case, + ) + form: Form = proto.Field( + proto.ENUM, + number=4, + enum=Form, + ) + gender: Gender = proto.Field( + proto.ENUM, + number=5, + enum=Gender, + ) + mood: Mood = proto.Field( + proto.ENUM, + number=6, + enum=Mood, + ) + number: Number = proto.Field( + proto.ENUM, + number=7, + enum=Number, + ) + person: Person = proto.Field( + proto.ENUM, + number=8, + enum=Person, + ) + proper: Proper = proto.Field( + proto.ENUM, + number=9, + enum=Proper, + ) + reciprocity: Reciprocity = proto.Field( + proto.ENUM, + number=10, + enum=Reciprocity, + ) + tense: Tense = proto.Field( + proto.ENUM, + number=11, + enum=Tense, + ) + voice: Voice = proto.Field( + proto.ENUM, + number=12, + enum=Voice, + ) + + +class DependencyEdge(proto.Message): + r"""Represents dependency parse tree information for a token. + + Attributes: + head_token_index (int): + Represents the head of this token in the dependency tree. + This is the index of the token which has an arc going to + this token. The index is the position of the token in the + array of tokens returned by the API method. If this token is + a root token, then the ``head_token_index`` is its own + index. + label (google.cloud.language_v1beta2.types.DependencyEdge.Label): + The parse label for the token. + """ + class Label(proto.Enum): + r"""The parse label enum for the token. 
+ + Values: + UNKNOWN (0): + Unknown + ABBREV (1): + Abbreviation modifier + ACOMP (2): + Adjectival complement + ADVCL (3): + Adverbial clause modifier + ADVMOD (4): + Adverbial modifier + AMOD (5): + Adjectival modifier of an NP + APPOS (6): + Appositional modifier of an NP + ATTR (7): + Attribute dependent of a copular verb + AUX (8): + Auxiliary (non-main) verb + AUXPASS (9): + Passive auxiliary + CC (10): + Coordinating conjunction + CCOMP (11): + Clausal complement of a verb or adjective + CONJ (12): + Conjunct + CSUBJ (13): + Clausal subject + CSUBJPASS (14): + Clausal passive subject + DEP (15): + Dependency (unable to determine) + DET (16): + Determiner + DISCOURSE (17): + Discourse + DOBJ (18): + Direct object + EXPL (19): + Expletive + GOESWITH (20): + Goes with (part of a word in a text not well + edited) + IOBJ (21): + Indirect object + MARK (22): + Marker (word introducing a subordinate + clause) + MWE (23): + Multi-word expression + MWV (24): + Multi-word verbal expression + NEG (25): + Negation modifier + NN (26): + Noun compound modifier + NPADVMOD (27): + Noun phrase used as an adverbial modifier + NSUBJ (28): + Nominal subject + NSUBJPASS (29): + Passive nominal subject + NUM (30): + Numeric modifier of a noun + NUMBER (31): + Element of compound number + P (32): + Punctuation mark + PARATAXIS (33): + Parataxis relation + PARTMOD (34): + Participial modifier + PCOMP (35): + The complement of a preposition is a clause + POBJ (36): + Object of a preposition + POSS (37): + Possession modifier + POSTNEG (38): + Postverbal negative particle + PRECOMP (39): + Predicate complement + PRECONJ (40): + Preconjunt + PREDET (41): + Predeterminer + PREF (42): + Prefix + PREP (43): + Prepositional modifier + PRONL (44): + The relationship between a verb and verbal + morpheme + PRT (45): + Particle + PS (46): + Associative or possessive marker + QUANTMOD (47): + Quantifier phrase modifier + RCMOD (48): + Relative clause modifier + RCMODREL (49): + Complementizer in relative clause + RDROP (50): + Ellipsis without a preceding predicate + REF (51): + Referent + REMNANT (52): + Remnant + REPARANDUM (53): + Reparandum + ROOT (54): + Root + SNUM (55): + Suffix specifying a unit of number + SUFF (56): + Suffix + TMOD (57): + Temporal modifier + TOPIC (58): + Topic marker + VMOD (59): + Clause headed by an infinite form of the verb + that modifies a noun + VOCATIVE (60): + Vocative + XCOMP (61): + Open clausal complement + SUFFIX (62): + Name suffix + TITLE (63): + Name title + ADVPHMOD (64): + Adverbial phrase modifier + AUXCAUS (65): + Causative auxiliary + AUXVV (66): + Helper auxiliary + DTMOD (67): + Rentaishi (Prenominal modifier) + FOREIGN (68): + Foreign words + KW (69): + Keyword + LIST (70): + List for chains of comparable items + NOMC (71): + Nominalized clause + NOMCSUBJ (72): + Nominalized clausal subject + NOMCSUBJPASS (73): + Nominalized clausal passive + NUMC (74): + Compound of numeric modifier + COP (75): + Copula + DISLOCATED (76): + Dislocated relation (for fronted/topicalized + elements) + ASP (77): + Aspect marker + GMOD (78): + Genitive modifier + GOBJ (79): + Genitive object + INFMOD (80): + Infinitival modifier + MES (81): + Measure + NCOMP (82): + Nominal complement of a noun + """ + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + 
MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index: int = proto.Field( + proto.INT32, + number=1, + ) + label: Label = proto.Field( + proto.ENUM, + number=2, + enum=Label, + ) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. + + Attributes: + text (google.cloud.language_v1beta2.types.TextSpan): + The mention text. + type_ (google.cloud.language_v1beta2.types.EntityMention.Type): + The type of the entity mention. + sentiment (google.cloud.language_v1beta2.types.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. + """ + class Type(proto.Enum): + r"""The supported types of mentions. + + Values: + TYPE_UNKNOWN (0): + Unknown + PROPER (1): + Proper name + COMMON (2): + Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 + + text: 'TextSpan' = proto.Field( + proto.MESSAGE, + number=1, + message='TextSpan', + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=3, + message='Sentiment', + ) + + +class TextSpan(proto.Message): + r"""Represents an output piece of text. + + Attributes: + content (str): + The content of the output text. + begin_offset (int): + The API calculates the beginning offset of the content in + the original document according to the + [EncodingType][google.cloud.language.v1beta2.EncodingType] + specified in the API request. + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + begin_offset: int = proto.Field( + proto.INT32, + number=2, + ) + + +class ClassificationCategory(proto.Message): + r"""Represents a category returned from the text classifier. + + Attributes: + name (str): + The name of the category representing the + document. + confidence (float): + The classifier's confidence of the category. + Number represents how certain the classifier is + that this category represents the given text. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class ClassificationModelOptions(proto.Message): + r"""Model options available for classification requests. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + v1_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V1Model): + Setting this field will use the V1 model and + V1 content categories version. The V1 model is a + legacy model; support for this will be + discontinued in the future. + + This field is a member of `oneof`_ ``model_type``. + v2_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model): + Setting this field will use the V2 model with + the appropriate content categories version. The + V2 model is a better performing model. + + This field is a member of `oneof`_ ``model_type``. + """ + + class V1Model(proto.Message): + r"""Options for the V1 model. + """ + + class V2Model(proto.Message): + r"""Options for the V2 model. + + Attributes: + content_categories_version (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): + The content categories used for + classification. + """ + class ContentCategoriesVersion(proto.Enum): + r"""The content categories used for classification. + + Values: + CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): + If ``ContentCategoriesVersion`` is not specified, this + option will default to ``V1``. + V1 (1): + Legacy content categories of our initial + launch in 2017. + V2 (2): + Updated content categories in 2022. + """ + CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 + V1 = 1 + V2 = 2 + + content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( + proto.ENUM, + number=1, + enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', + ) + + v1_model: V1Model = proto.Field( + proto.MESSAGE, + number=1, + oneof='model_type', + message=V1Model, + ) + v2_model: V2Model = proto.Field( + proto.MESSAGE, + number=2, + oneof='model_type', + message=V2Model, + ) + + +class AnalyzeSentimentRequest(proto.Message): + r"""The sentiment analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate sentence offsets for the sentence + sentiment. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSentimentResponse(proto.Message): + r"""The sentiment analysis response message. + + Attributes: + document_sentiment (google.cloud.language_v1beta2.types.Sentiment): + The overall sentiment of the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + The sentiment for all the sentences in the + document. + """ + + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=1, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Sentence', + ) + + +class AnalyzeEntitySentimentRequest(proto.Message): + r"""The entity-level sentiment analysis request message. 
+ + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entity', + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (google.cloud.language_v1beta2.types.Document): + Required. Input document. + encoding_type (google.cloud.language_v1beta2.types.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=2, + enum='EncodingType', + ) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + Sentences in the input document. + tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. 
See
+            [Document.language][google.cloud.language.v1beta2.Document.language]
+            field for more details.
+    """
+
+    sentences: MutableSequence['Sentence'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='Sentence',
+    )
+    tokens: MutableSequence['Token'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='Token',
+    )
+    language: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ClassifyTextRequest(proto.Message):
+    r"""The document classification request message.
+
+    Attributes:
+        document (google.cloud.language_v1beta2.types.Document):
+            Required. Input document.
+        classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions):
+            Model options to use for classification.
+            Defaults to v1 options if not specified.
+    """
+
+    document: 'Document' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Document',
+    )
+    classification_model_options: 'ClassificationModelOptions' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='ClassificationModelOptions',
+    )
+
+
+class ClassifyTextResponse(proto.Message):
+    r"""The document classification response message.
+
+    Attributes:
+        categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
+            Categories representing the input document.
+    """
+
+    categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='ClassificationCategory',
+    )
+
+
+class ModerateTextRequest(proto.Message):
+    r"""The document moderation request message.
+
+    Attributes:
+        document (google.cloud.language_v1beta2.types.Document):
+            Required. Input document.
+    """
+
+    document: 'Document' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Document',
+    )
+
+
+class ModerateTextResponse(proto.Message):
+    r"""The document moderation response message.
+
+    Attributes:
+        moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
+            Harmful and sensitive categories representing
+            the input document.
+    """
+
+    moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='ClassificationCategory',
+    )
+
+
+class AnnotateTextRequest(proto.Message):
+    r"""The request message for the text annotation API, which can
+    perform multiple analysis types (sentiment, entities, and
+    syntax) in one call.
+
+    Attributes:
+        document (google.cloud.language_v1beta2.types.Document):
+            Required. Input document.
+        features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features):
+            Required. The enabled features.
+        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
+            The encoding type used by the API to
+            calculate offsets.
+    """
+
+    class Features(proto.Message):
+        r"""All available features for sentiment, syntax, and semantic
+        analysis. Setting each one to true will enable that specific
+        analysis for the input. Next ID: 12
+
+        Attributes:
+            extract_syntax (bool):
+                Extract syntax information.
+            extract_entities (bool):
+                Extract entities.
+            extract_document_sentiment (bool):
+                Extract document-level sentiment.
+            extract_entity_sentiment (bool):
+                Extract entities and their associated
+                sentiment.
+            classify_text (bool):
+                Classify the full document into categories. If this is true,
+                the API will use the default model which classifies into a
+                `predefined taxonomy
+                <https://cloud.google.com/natural-language/docs/categories>`__.
+            moderate_text (bool):
+                Moderate the document for harmful and
+                sensitive categories.
+ classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions): + The model options to use for classification. Defaults to v1 + options if not specified. Only used if ``classify_text`` is + set to true. + """ + + extract_syntax: bool = proto.Field( + proto.BOOL, + number=1, + ) + extract_entities: bool = proto.Field( + proto.BOOL, + number=2, + ) + extract_document_sentiment: bool = proto.Field( + proto.BOOL, + number=3, + ) + extract_entity_sentiment: bool = proto.Field( + proto.BOOL, + number=4, + ) + classify_text: bool = proto.Field( + proto.BOOL, + number=6, + ) + moderate_text: bool = proto.Field( + proto.BOOL, + number=11, + ) + classification_model_options: 'ClassificationModelOptions' = proto.Field( + proto.MESSAGE, + number=10, + message='ClassificationModelOptions', + ) + + document: 'Document' = proto.Field( + proto.MESSAGE, + number=1, + message='Document', + ) + features: Features = proto.Field( + proto.MESSAGE, + number=2, + message=Features, + ) + encoding_type: 'EncodingType' = proto.Field( + proto.ENUM, + number=3, + enum='EncodingType', + ) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (google.cloud.language_v1beta2.types.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Categories identified in the input document. + moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]): + Harmful and sensitive categories identified + in the input document. 
+ """ + + sentences: MutableSequence['Sentence'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Sentence', + ) + tokens: MutableSequence['Token'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='Token', + ) + entities: MutableSequence['Entity'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Entity', + ) + document_sentiment: 'Sentiment' = proto.Field( + proto.MESSAGE, + number=4, + message='Sentiment', + ) + language: str = proto.Field( + proto.STRING, + number=5, + ) + categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ClassificationCategory', + ) + moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='ClassificationCategory', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta2/mypy.ini b/owl-bot-staging/v1beta2/mypy.ini new file mode 100644 index 00000000..574c5aed --- /dev/null +++ b/owl-bot-staging/v1beta2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1beta2/noxfile.py b/owl-bot-staging/v1beta2/noxfile.py new file mode 100644 index 00000000..95cd6c8b --- /dev/null +++ b/owl-bot-staging/v1beta2/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/language_v1beta2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py new file mode 100644 index 00000000..ef2d4a6d --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py new file mode 100644 index 00000000..b8c2694b --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_entities(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitiesRequest( + document=document, + ) + + # Make the request + response = client.analyze_entities(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py new file mode 100644 index 00000000..818d4209 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py new file mode 100644 index 00000000..cabc3ff5 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeEntitySentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_entity_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeEntitySentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_entity_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py new file mode 100644 index 00000000..b60e606a --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = await client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py new file mode 100644 index 00000000..df735913 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSentiment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_sentiment(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSentimentRequest( + document=document, + ) + + # Make the request + response = client.analyze_sentiment(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py new file mode 100644 index 00000000..e42a0728 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = await client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py new file mode 100644 index 00000000..f9ed77cc --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeSyntax +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_analyze_syntax(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnalyzeSyntaxRequest( + document=document, + ) + + # Make the request + response = client.analyze_syntax(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py new file mode 100644 index 00000000..5b17e2b1 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnnotateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = await client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py new file mode 100644 index 00000000..701c94e5 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnnotateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_AnnotateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_annotate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.AnnotateTextRequest( + document=document, + ) + + # Make the request + response = client.annotate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py new file mode 100644 index 00000000..94b5ebcc --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ClassifyText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = await client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py new file mode 100644 index 00000000..f9415093 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClassifyText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ClassifyText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_classify_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ClassifyTextRequest( + document=document, + ) + + # Make the request + response = client.classify_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py new file mode 100644 index 00000000..d8385285 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ModerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +async def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ModerateText_async] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py new file mode 100644 index 00000000..78d11521 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-language + + +# [START language_v1beta2_generated_LanguageService_ModerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import language_v1beta2 + + +def sample_moderate_text(): + # Create a client + client = language_v1beta2.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1beta2.Document() + document.content = "content_value" + + request = language_v1beta2.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + +# [END language_v1beta2_generated_LanguageService_ModerateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json new file mode 100644 index 00000000..fb6633f2 --- /dev/null +++ b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json @@ -0,0 +1,1190 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.language.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-cloud-language", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entities", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1beta2_generated_language_service_analyze_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entities", + "method": { + "fullName": 
"google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", + "shortName": "analyze_entities" + }, + "description": "Sample for AnalyzeEntities", + "file": "language_v1beta2_generated_language_service_analyze_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entity_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeEntitySentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", + "shortName": "analyze_entity_sentiment" + }, + "description": "Sample for AnalyzeEntitySentiment", + "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1beta2_generated_language_service_analyze_sentiment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_sentiment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_sentiment", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSentiment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", + "shortName": "analyze_sentiment" + }, + "description": "Sample for AnalyzeSentiment", + "file": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1beta2_generated_language_service_analyze_syntax_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_syntax_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_syntax", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnalyzeSyntax" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", + "shortName": "analyze_syntax" + }, + "description": "Sample for AnalyzeSyntax", + "file": "language_v1beta2_generated_language_service_analyze_syntax_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_analyze_syntax_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1beta2_generated_language_service_annotate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_annotate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.annotate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "AnnotateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "features", + "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" + }, + { + "name": "encoding_type", + "type": "google.cloud.language_v1beta2.types.EncodingType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", + "shortName": "annotate_text" + }, + "description": "Sample for AnnotateText", + "file": "language_v1beta2_generated_language_service_annotate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_annotate_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": 
"google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1beta2_generated_language_service_classify_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_classify_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.classify_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ClassifyText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", + "shortName": "classify_text" + }, + "description": "Sample for ClassifyText", + "file": "language_v1beta2_generated_language_service_classify_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_classify_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", + "shortName": "LanguageServiceAsyncClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": 
"google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", + "shortName": "LanguageServiceClient" + }, + "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.moderate_text", + "method": { + "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", + "service": { + "fullName": "google.cloud.language.v1beta2.LanguageService", + "shortName": "LanguageService" + }, + "shortName": "ModerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" + }, + { + "name": "document", + "type": "google.cloud.language_v1beta2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", + "shortName": "moderate_text" + }, + "description": "Sample for ModerateText", + "file": "language_v1beta2_generated_language_service_moderate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "language_v1beta2_generated_language_service_moderate_text_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py new file mode 100644 index 00000000..10fa218c --- /dev/null +++ b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', 'classification_model_options', ), + 'moderate_text': ('document', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
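        # --- Editorial sketch (comments only; not part of the generated script).
        # The transformer applied by tree.visit() above rewrites flattened
        # positional calls into the request-dict form, e.g. for a method listed
        # in METHOD_TO_PARAMS:
        #
        #     client.moderate_text(document)
        #
        # becomes
        #
        #     client.moderate_text(request={'document': document})
        #
        # while retry/timeout/metadata survive as ordinary keyword arguments.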
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta2/setup.py b/owl-bot-staging/v1beta2/setup.py new file mode 100644 index 00000000..047e5bce --- /dev/null +++ b/owl-bot-staging/v1beta2/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-language' + + +description = "Google Cloud Language API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-language" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
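# Editorial note (usage assumption, not stated in this file): these pins are
# typically fed to pip as a constraints file when installing for tests, e.g.
#     pip install -e . -c testing/constraints-3.12.txt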
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt new file mode 100644 index 00000000..6c44adfe --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta2/tests/__init__.py b/owl-bot-staging/v1beta2/tests/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/__init__.py b/owl-bot-staging/v1beta2/tests/unit/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py new file mode 100644 index 00000000..231bc125 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py new file mode 100644 index 00000000..3e0b7671 --- /dev/null +++ b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -0,0 +1,4070 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.language_v1beta2.services.language_service import LanguageServiceAsyncClient +from google.cloud.language_v1beta2.services.language_service import LanguageServiceClient +from google.cloud.language_v1beta2.services.language_service import transports +from google.cloud.language_v1beta2.types import language_service +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LanguageServiceClient._get_default_mtls_endpoint(None) is None + assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + 
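# --- Editorial sketch (not part of the generated diff) -----------------------
# Why: the tests above construct clients from service-account credentials; this
# shows the equivalent application-side construction, plus the new ModerateText
# surface this PR adds. AnonymousCredentials and the "rest" transport are
# chosen only to keep the sketch offline and self-contained; the RPC itself is
# hypothetical and would need real credentials.
def _editorial_usage_sketch():
    from google.auth import credentials as ga_credentials
    from google.cloud import language_v1beta2

    # Construction is purely local; no network I/O happens until an RPC runs.
    client = language_v1beta2.LanguageServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Mirrors the private-attribute check used by the test above.
    assert client.transport._host == "https://language.googleapis.com"

    # Hypothetical live call (requires real credentials):
    # document = language_v1beta2.Document(
    #     content="Hello, world!",
    #     type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    # )
    # response = client.moderate_text(document=document)
    # print(response.moderation_categories)
# ------------------------------------------------------------------------------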
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LanguageServiceGrpcTransport, "grpc"), + (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LanguageServiceClient, "grpc"), + (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), +]) +def test_language_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'language.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://language.googleapis.com' + ) + + +def test_language_service_client_get_transport_class(): + transport = LanguageServiceClient.get_transport_class() + available_transports = [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, + ] + assert transport in available_transports + + transport = LanguageServiceClient.get_transport_class("grpc") + assert transport == transports.LanguageServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
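    # (Editorial note: "squid.clam.whelk" below is a deliberately nonsensical
    # host used only to prove the override is forwarded to the transport; a
    # real override would pass something like
    # ClientOptions(api_endpoint="language.googleapis.com:443").)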
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
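    # (Editorial note: GOOGLE_API_USE_CLIENT_CERTIFICATE accepts only "true"
    # or "false"; any other value makes client construction raise ValueError,
    # as exercised below.)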
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
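    # ------------------------------------------------------------------
    # Editorial summary (comments only): endpoint selection is driven by two
    # environment variables:
    #   GOOGLE_API_USE_CLIENT_CERTIFICATE: "true" / "false" (default "false")
    #   GOOGLE_API_USE_MTLS_ENDPOINT: "always" / "never" / "auto" (default "auto")
    # Under "auto" (patched in above), DEFAULT_MTLS_ENDPOINT is chosen only
    # when a client certificate is actually available, either passed through
    # client_options or discovered via ADC; otherwise the client keeps
    # DEFAULT_ENDPOINT.
    # ------------------------------------------------------------------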
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LanguageServiceClient, LanguageServiceAsyncClient +]) +@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) +@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) +def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
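    # (Editorial sketch: the classmethod under test is public, so applications
    # can resolve the effective endpoint before building a client, e.g.
    #     endpoint, cert_source = LanguageServiceClient.get_mtls_endpoint_and_cert_source()
    # with both environment variables exercised below feeding into the result.)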
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), +]) +def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
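    # (Editorial note: when no scopes are supplied, the transport falls back to
    # the service defaults, cloud-language and cloud-platform, which the
    # create_channel test further down asserts explicitly.)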
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), +]) +def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_language_service_client_client_options_from_dict(): + with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "language.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-language', + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="language.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSentimentRequest, + dict, +]) +def test_analyze_sentiment(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language='language_value', + ) + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. 
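        # (Editorial note: the async client awaits its transport calls, so the
        # test wraps the response in grpc_helpers_async.FakeUnaryUnaryCall, an
        # api_core helper that behaves like an awaitable unary-unary call.)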
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse( + language='language_value', + )) + response = await client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_sentiment_async_from_dict(): + await test_analyze_sentiment_async(request_type=dict) + + +def test_analyze_sentiment_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_sentiment_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_sentiment( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
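    # (Editorial note: the flattened document/encoding_type keywords are packed
    # into a single AnalyzeSentimentRequest that the mocked stub receives as
    # args[0]; the assertions below unpack and compare its fields.)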
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitiesRequest, + dict, +]) +def test_analyze_entities(request_type, transport: str = 'grpc'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language='language_value', + ) + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse( + language='language_value', + )) + response = await client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == 'language_value' + + +@pytest.mark.asyncio +async def test_analyze_entities_async_from_dict(): + await test_analyze_entities_async(request_type=dict) + + +def test_analyze_entities_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val + + +def test_analyze_entities_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entities), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entities( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+@pytest.mark.asyncio
+async def test_analyze_entities_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entities),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entities(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_entities_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_entities(
+            language_service.AnalyzeEntitiesRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.AnalyzeEntitySentimentRequest,
+  dict,
+])
+def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeEntitySentimentResponse(
+            language='language_value',
+        )
+        response = client.analyze_entity_sentiment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
+    assert response.language == 'language_value'
+
+
+def test_analyze_entity_sentiment_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        client.analyze_entity_sentiment()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
+            language='language_value',
+        ))
+        response = await client.analyze_entity_sentiment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_async_from_dict():
+    await test_analyze_entity_sentiment_async(request_type=dict)
+
+
+def test_analyze_entity_sentiment_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeEntitySentimentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_entity_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_entity_sentiment_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_entity_sentiment(
+            language_service.AnalyzeEntitySentimentRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_entity_sentiment),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entity_sentiment(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_entity_sentiment(
+            language_service.AnalyzeEntitySentimentRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.AnalyzeSyntaxRequest,
+  dict,
+])
+def test_analyze_syntax(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse(
+            language='language_value',
+        )
+        response = client.analyze_syntax(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+    assert response.language == 'language_value'
+
+
+def test_analyze_syntax_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        client.analyze_syntax()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse(
+            language='language_value',
+        ))
+        response = await client.analyze_syntax(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async_from_dict():
+    await test_analyze_syntax_async(request_type=dict)
+
+
+def test_analyze_syntax_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_syntax(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_analyze_syntax_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_syntax(
+            language_service.AnalyzeSyntaxRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.analyze_syntax),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_syntax(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
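+# NOTE (editorial, not generated code): a minimal sketch of how the async
+# surface exercised above would be driven outside of pytest, assuming
+# application-default credentials; `sample_text` is a hypothetical input.
+# Callers would run it with e.g. asyncio.run(sample_analyze_syntax_async("...")).
+async def sample_analyze_syntax_async(sample_text):
+    client = LanguageServiceAsyncClient()
+    document = language_service.Document(
+        content=sample_text,
+        type_=language_service.Document.Type.PLAIN_TEXT,
+    )
+    # The async client mirrors the sync signature; the call is awaited.
+    return await client.analyze_syntax(
+        document=document,
+        encoding_type=language_service.EncodingType.UTF8,
+    )
+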
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_syntax(
+            language_service.AnalyzeSyntaxRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.ClassifyTextRequest,
+  dict,
+])
+def test_classify_text(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ClassifyTextResponse(
+        )
+        response = client.classify_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ClassifyTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+def test_classify_text_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        client.classify_text()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ClassifyTextRequest()
+
+@pytest.mark.asyncio
+async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse(
+        ))
+        response = await client.classify_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ClassifyTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ClassifyTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_classify_text_async_from_dict():
+    await test_classify_text_async(request_type=dict)
+
+
+def test_classify_text_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ClassifyTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.classify_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+
+def test_classify_text_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.classify_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.classify_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+
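+# NOTE (editorial, not generated code): ModerateText is the surface added in
+# this change. A minimal usage sketch, assuming application-default
+# credentials; `sample_text` is a hypothetical input, and the response
+# iteration assumes the v1beta2 ModerateTextResponse.moderation_categories field.
+def sample_moderate_text(sample_text):
+    client = LanguageServiceClient()
+    document = language_service.Document(
+        content=sample_text,
+        type_=language_service.Document.Type.PLAIN_TEXT,
+    )
+    response = client.moderate_text(document=document)
+    # Each category carries a name and a confidence score.
+    for category in response.moderation_categories:
+        print(category.name, category.confidence)
+    return response
+
+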
+@pytest.mark.parametrize("request_type", [
+  language_service.ModerateTextRequest,
+  dict,
+])
+def test_moderate_text(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ModerateTextResponse(
+        )
+        response = client.moderate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ModerateTextResponse)
+
+
+def test_moderate_text_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        client.moderate_text()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+@pytest.mark.asyncio
+async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse(
+        ))
+        response = await client.moderate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ModerateTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_moderate_text_async_from_dict():
+    await test_moderate_text_async(request_type=dict)
+
+
+def test_moderate_text_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ModerateTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.moderate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+
+def test_moderate_text_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.moderate_text(
+            language_service.ModerateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.moderate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.moderate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_moderate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.moderate_text(
+            language_service.ModerateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  language_service.AnnotateTextRequest,
+  dict,
+])
+def test_annotate_text(request_type, transport: str = 'grpc'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse(
+            language='language_value',
+        )
+        response = client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnnotateTextResponse)
+    assert response.language == 'language_value'
+
+
+def test_annotate_text_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        client.annotate_text()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+@pytest.mark.asyncio
+async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
+            language='language_value',
+        ))
+        response = await client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.AnnotateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnnotateTextResponse)
+    assert response.language == 'language_value'
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_async_from_dict():
+    await test_annotate_text_async(request_type=dict)
+
+
+def test_annotate_text_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.annotate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+
+def test_annotate_text_flattened_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.annotate_text),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_text(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].document
+        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
+        assert arg == mock_val
+        arg = args[0].encoding_type
+        mock_val = language_service.EncodingType.UTF8
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
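+# NOTE (editorial, not generated code): AnnotateText bundles several analyses
+# into one call; the Features message toggles which ones run. A minimal sketch,
+# assuming application-default credentials; `sample_text` is hypothetical.
+def sample_annotate_text(sample_text):
+    client = LanguageServiceClient()
+    document = language_service.Document(
+        content=sample_text,
+        type_=language_service.Document.Type.PLAIN_TEXT,
+    )
+    # Only the analyses enabled here are performed.
+    features = language_service.AnnotateTextRequest.Features(
+        extract_syntax=True,
+        extract_entities=True,
+        extract_document_sentiment=True,
+    )
+    return client.annotate_text(
+        document=document,
+        features=features,
+        encoding_type=language_service.EncodingType.UTF8,
+    )
+
+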
+@pytest.mark.parametrize("request_type", [
+  language_service.AnalyzeSentimentRequest,
+  dict,
+])
+def test_analyze_sentiment_rest(request_type):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = language_service.AnalyzeSentimentResponse(
+            language='language_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.analyze_sentiment(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSentimentResponse)
+    assert response.language == 'language_value'
+
+
+def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest):
+    transport_class = transports.LanguageServiceRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = language_service.AnalyzeSentimentResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_sentiment(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_sentiment_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_sentiment_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
+
+        request = language_service.AnalyzeSentimentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeSentimentResponse()
+
+        client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
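+# NOTE (editorial, not generated code): the interceptor test above drives the
+# hook pair that user code can override. A minimal sketch of a custom
+# interceptor wired into a REST transport, assuming the module-level imports
+# of this test file; the print calls are placeholders.
+class SampleLoggingInterceptor(transports.LanguageServiceRestInterceptor):
+    def pre_analyze_sentiment(self, request, metadata):
+        # Runs before the request is transcoded and sent.
+        print("about to call AnalyzeSentiment")
+        return request, metadata
+
+    def post_analyze_sentiment(self, response):
+        # Runs after the response is deserialized, before it is returned.
+        print("AnalyzeSentiment returned")
+        return response
+
+
+def sample_client_with_interceptor():
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=SampleLoggingInterceptor(),
+    )
+    return LanguageServiceClient(transport=transport)
+
+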
+def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.analyze_sentiment(request)
+
+
+def test_analyze_sentiment_rest_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = language_service.AnalyzeSentimentResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.analyze_sentiment(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1beta2/documents:analyzeSentiment" % client.transport._host, args[1])
+
+
+def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_sentiment(
+            language_service.AnalyzeSentimentRequest(),
+            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+def test_analyze_sentiment_rest_error():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
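+# NOTE (editorial, not generated code): the REST tests exercise the same public
+# surface over HTTP/JSON; choosing it is only a transport selection. A minimal
+# sketch, assuming application-default credentials; as validated above, the
+# transport transcodes this call to POST .../v1beta2/documents:analyzeSentiment.
+def sample_analyze_sentiment_rest(sample_text):
+    client = LanguageServiceClient(transport="rest")
+    document = language_service.Document(
+        content=sample_text,
+        type_=language_service.Document.Type.PLAIN_TEXT,
+    )
+    return client.analyze_sentiment(document=document)
+
+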
+@pytest.mark.parametrize("request_type", [
+  language_service.AnalyzeEntitiesRequest,
+  dict,
+])
+def test_analyze_entities_rest(request_type):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = language_service.AnalyzeEntitiesResponse(
+            language='language_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.analyze_entities(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
+    assert response.language == 'language_value'
+
+
+def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest):
+    transport_class = transports.LanguageServiceRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = language_service.AnalyzeEntitiesResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_entities(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_entities_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_entities._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_entities_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
+         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
+
+        request = language_service.AnalyzeEntitiesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeEntitiesResponse()
+
+        client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
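+# NOTE (editorial, not generated code): the *_rest_bad_request tests simulate a
+# 400 response; at the public surface it arrives as a google.api_core exception
+# (the core_exceptions.BadRequest family asserted in those tests). A minimal
+# handling sketch around a hypothetical prepared `request`:
+def sample_handle_bad_request(client, request):
+    try:
+        return client.analyze_entities(request=request)
+    except core_exceptions.BadRequest as exc:
+        # Surface the server's complaint to the caller / logs.
+        print("AnalyzeEntities rejected:", exc)
+        return None
+
+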
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) + + +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeEntities" % client.transport._host, args[1]) + + +def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeEntitySentimentRequest, + dict, +]) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeEntitySentimentResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == 'language_value' + + +def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.analyze_entity_sentiment(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_analyze_entity_sentiment_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \ + mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse()) + + request = language_service.AnalyzeEntitySentimentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeEntitySentimentResponse() + + client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeEntitySentiment" % client.transport._host, args[1]) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnalyzeSyntaxRequest, + dict, +]) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == 'language_value' + + +def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.analyze_syntax(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_analyze_syntax_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.analyze_syntax._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_analyze_syntax_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
+
+        request = language_service.AnalyzeSyntaxRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnalyzeSyntaxResponse()
+
+        client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
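+    # google.api_core maps the mocked 400 status below to core_exceptions.BadRequest, which pytest.raises expects.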
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_syntax(request) + + +def test_analyze_syntax_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_syntax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:analyzeSyntax" % client.transport._host, args[1]) + + +def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ClassifyTextRequest, + dict, +]) +def test_classify_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.classify_text(request) + + # Establish that the response is the type that we expect. 
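+    # The fake ClassifyTextResponse was built with no fields set, so only its type can be verified.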
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.classify_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_classify_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.classify_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_classify_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
+
+        request = language_service.ClassifyTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.ClassifyTextResponse()
+
+        client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:classifyText" % client.transport._host, args[1]) + + +def test_classify_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.ModerateTextRequest, + dict, +]) +def test_moderate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.moderate_text(request) + + # Establish that the response is the type that we expect. 
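+    # As with classify_text above, the fake ModerateTextResponse is empty, so the type check is the whole assertion.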
+ assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.ModerateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.moderate_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_moderate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.moderate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_moderate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse())
+
+        request = language_service.ModerateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.ModerateTextResponse()
+
+        client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.moderate_text(request) + + +def test_moderate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.moderate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:moderateText" % client.transport._host, args[1]) + + +def test_moderate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + ) + + +def test_moderate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + language_service.AnnotateTextRequest, + dict, +]) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language='language_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
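+    # Both the parsed type and the echoed 'language' value from the fake response are checked.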
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == 'language_value' + + +def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.annotate_text(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_annotate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.annotate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document", "features", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
+        )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
+        mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
+
+        request = language_service.AnnotateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.AnnotateTextResponse()
+
+        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta2/documents:annotateText" % client.transport._host, args[1]) + + +def test_annotate_text_rest_flattened_error(transport: str = 'rest'): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
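+    # Each RPC on the service surface must be overridden by a concrete transport (gRPC, REST).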
+    methods = (
+        'analyze_sentiment',
+        'analyze_entities',
+        'analyze_entity_sentiment',
+        'analyze_syntax',
+        'classify_text',
+        'moderate_text',
+        'annotate_text',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_language_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.LanguageServiceTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-language',
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_language_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.LanguageServiceTransport()
+        adc.assert_called_once()
+
+
+def test_language_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        LanguageServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-language',
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_language_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
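+    # Patching google.auth.default keeps the test from touching real application default credentials.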
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-language',
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+        transports.LanguageServiceRestTransport,
+    ],
+)
+def test_language_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.LanguageServiceGrpcTransport, grpc_helpers),
+        (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_language_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "language.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-language',
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            scopes=["1", "2"],
+            default_host="language.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
+def test_language_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
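+    # The callback should be invoked and its cert/key pair handed to grpc.ssl_channel_credentials.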
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_language_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.LanguageServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_no_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_language_service_host_with_port(transport_name):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'language.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://language.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_language_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = LanguageServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = LanguageServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.analyze_sentiment._session
+    session2 = client2.transport.analyze_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entities._session
+    session2 = client2.transport.analyze_entities._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_entity_sentiment._session
+    session2 = client2.transport.analyze_entity_sentiment._session
+    assert session1 != session2
+    session1 = client1.transport.analyze_syntax._session
+    session2 = client2.transport.analyze_syntax._session
+    assert session1 != session2
+    session1 = client1.transport.classify_text._session
+    session2 = client2.transport.classify_text._session
+    assert session1 != session2
+    session1 = client1.transport.moderate_text._session
+    session2 = client2.transport.moderate_text._session
+    assert session1 != session2
+    session1 = client1.transport.annotate_text._session
+    session2 = client2.transport.annotate_text._session
+    assert session1 != session2
+
+def test_language_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
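+    # A caller-supplied channel bypasses channel creation; the host is still normalized with the default port.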
+    transport = transports.LanguageServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_language_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.LanguageServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport])
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) +def test_language_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LanguageServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LanguageServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LanguageServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LanguageServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LanguageServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LanguageServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LanguageServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LanguageServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LanguageServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LanguageServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LanguageServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LanguageServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = LanguageServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
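+        # Exiting the 'with' block should trigger transport.close().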
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport), + (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 1e9e6aef0ec2f4b3d9d1c49bd1b18bacc89e0d3c Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 25 May 2023 00:51:58 +0000 Subject: [PATCH 4/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/language/__init__.py | 4 + google/cloud/language_v1/__init__.py | 4 + google/cloud/language_v1/gapic_metadata.json | 15 + .../services/language_service/async_client.py | 98 + .../services/language_service/client.py | 98 + .../language_service/transports/base.py | 17 + .../language_service/transports/grpc.py | 29 + .../transports/grpc_asyncio.py | 30 + .../language_service/transports/rest.py | 139 + google/cloud/language_v1/types/__init__.py | 4 + .../language_v1/types/language_service.py | 57 +- owl-bot-staging/v1/.coveragerc | 13 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - owl-bot-staging/v1/docs/conf.py | 376 -- owl-bot-staging/v1/docs/index.rst | 7 - .../v1/docs/language_v1/language_service.rst | 6 - .../v1/docs/language_v1/services.rst | 6 - owl-bot-staging/v1/docs/language_v1/types.rst | 6 - .../v1/google/cloud/language/__init__.py | 79 - .../v1/google/cloud/language/gapic_version.py | 16 - .../v1/google/cloud/language/py.typed | 2 - .../v1/google/cloud/language_v1/__init__.py | 80 - .../cloud/language_v1/gapic_metadata.json | 133 - .../google/cloud/language_v1/gapic_version.py | 16 - .../v1/google/cloud/language_v1/py.typed | 2 - .../cloud/language_v1/services/__init__.py | 15 - .../services/language_service/__init__.py | 22 - .../services/language_service/async_client.py | 962 ---- .../services/language_service/client.py | 1115 ----- .../language_service/transports/__init__.py | 38 - .../language_service/transports/base.py | 275 -- .../language_service/transports/grpc.py | 432 -- .../transports/grpc_asyncio.py | 431 -- .../language_service/transports/rest.py | 1029 ----- .../cloud/language_v1/types/__init__.py | 72 - .../language_v1/types/language_service.py | 1724 ------- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...language_service_analyze_entities_async.py | 55 - ..._language_service_analyze_entities_sync.py | 55 - ..._service_analyze_entity_sentiment_async.py 
| 55 - ...e_service_analyze_entity_sentiment_sync.py | 55 - ...anguage_service_analyze_sentiment_async.py | 55 - ...language_service_analyze_sentiment_sync.py | 55 - ...d_language_service_analyze_syntax_async.py | 55 - ...ed_language_service_analyze_syntax_sync.py | 55 - ...ed_language_service_annotate_text_async.py | 55 - ...ted_language_service_annotate_text_sync.py | 55 - ...ed_language_service_classify_text_async.py | 55 - ...ted_language_service_classify_text_sync.py | 55 - ...pet_metadata_google.cloud.language.v1.json | 1190 ----- .../v1/scripts/fixup_language_v1_keywords.py | 182 - owl-bot-staging/v1/setup.py | 90 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 9 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/language_v1/__init__.py | 16 - .../language_v1/test_language_service.py | 4070 ----------------- owl-bot-staging/v1beta2/.coveragerc | 13 - owl-bot-staging/v1beta2/.flake8 | 33 - owl-bot-staging/v1beta2/MANIFEST.in | 2 - owl-bot-staging/v1beta2/README.rst | 49 - owl-bot-staging/v1beta2/docs/conf.py | 376 -- owl-bot-staging/v1beta2/docs/index.rst | 7 - .../language_v1beta2/language_service.rst | 6 - .../docs/language_v1beta2/services.rst | 6 - .../v1beta2/docs/language_v1beta2/types.rst | 6 - .../v1beta2/google/cloud/language/__init__.py | 79 - .../google/cloud/language/gapic_version.py | 16 - .../v1beta2/google/cloud/language/py.typed | 2 - .../google/cloud/language_v1beta2/__init__.py | 80 - .../language_v1beta2/gapic_metadata.json | 133 - .../cloud/language_v1beta2/gapic_version.py | 16 - .../google/cloud/language_v1beta2/py.typed | 2 - .../language_v1beta2/services/__init__.py | 15 - .../services/language_service/__init__.py | 22 - .../services/language_service/async_client.py | 963 ---- .../services/language_service/client.py | 1116 ----- .../language_service/transports/__init__.py | 38 - .../language_service/transports/base.py | 275 -- .../language_service/transports/grpc.py | 432 -- .../transports/grpc_asyncio.py | 431 -- .../language_service/transports/rest.py | 1029 ----- .../cloud/language_v1beta2/types/__init__.py | 72 - .../types/language_service.py | 1761 ------- owl-bot-staging/v1beta2/mypy.ini | 3 - owl-bot-staging/v1beta2/noxfile.py | 184 - ...language_service_analyze_entities_async.py | 55 - ..._language_service_analyze_entities_sync.py | 55 - ..._service_analyze_entity_sentiment_async.py | 55 - ...e_service_analyze_entity_sentiment_sync.py | 55 - ...anguage_service_analyze_sentiment_async.py | 55 - ...language_service_analyze_sentiment_sync.py | 55 - ...d_language_service_analyze_syntax_async.py | 55 - ...ed_language_service_analyze_syntax_sync.py | 55 - ...ed_language_service_annotate_text_async.py | 55 - ...ted_language_service_annotate_text_sync.py | 55 - ...ed_language_service_classify_text_async.py | 55 - ...ted_language_service_classify_text_sync.py | 55 - ...ed_language_service_moderate_text_async.py | 55 - ...ted_language_service_moderate_text_sync.py | 55 - ...etadata_google.cloud.language.v1beta2.json | 1190 ----- .../fixup_language_v1beta2_keywords.py | 182 - owl-bot-staging/v1beta2/setup.py | 90 - .../v1beta2/testing/constraints-3.10.txt | 6 - .../v1beta2/testing/constraints-3.11.txt | 6 - .../v1beta2/testing/constraints-3.12.txt | 6 - 
.../v1beta2/testing/constraints-3.7.txt | 9 - .../v1beta2/testing/constraints-3.8.txt | 6 - .../v1beta2/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1beta2/tests/__init__.py | 16 - .../v1beta2/tests/unit/__init__.py | 16 - .../v1beta2/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/language_v1beta2/__init__.py | 16 - .../language_v1beta2/test_language_service.py | 4070 ----------------- ...ed_language_service_moderate_text_async.py | 0 ...ted_language_service_moderate_text_sync.py | 0 ...pet_metadata_google.cloud.language.v1.json | 161 + scripts/fixup_language_v1_keywords.py | 1 + .../language_v1/test_language_service.py | 444 ++ 128 files changed, 1098 insertions(+), 26998 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/language_service.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/language_v1/types.rst delete mode 100644 owl-bot-staging/v1/google/cloud/language/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/language/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py delete mode 100644 
owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json delete mode 100644 owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py delete mode 100644 owl-bot-staging/v1beta2/.coveragerc delete mode 100644 owl-bot-staging/v1beta2/.flake8 delete mode 100644 owl-bot-staging/v1beta2/MANIFEST.in delete mode 100644 owl-bot-staging/v1beta2/README.rst delete mode 100644 owl-bot-staging/v1beta2/docs/conf.py delete mode 100644 owl-bot-staging/v1beta2/docs/index.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst delete mode 100644 owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language/py.typed delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py delete mode 100644 
owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py delete mode 100644 owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py delete mode 100644 owl-bot-staging/v1beta2/mypy.ini delete mode 100644 owl-bot-staging/v1beta2/noxfile.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py delete mode 100644 owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json delete mode 100644 owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py delete mode 100644 owl-bot-staging/v1beta2/setup.py delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1beta2/testing/constraints-3.8.txt delete mode 
100644 owl-bot-staging/v1beta2/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1beta2/tests/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py delete mode 100644 owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py rename {owl-bot-staging/v1/samples => samples}/generated_samples/language_v1_generated_language_service_moderate_text_async.py (100%) rename {owl-bot-staging/v1/samples => samples}/generated_samples/language_v1_generated_language_service_moderate_text_sync.py (100%) diff --git a/google/cloud/language/__init__.py b/google/cloud/language/__init__.py index 3e7674b2..a6faa72d 100644 --- a/google/cloud/language/__init__.py +++ b/google/cloud/language/__init__.py @@ -44,6 +44,8 @@ EncodingType, Entity, EntityMention, + ModerateTextRequest, + ModerateTextResponse, PartOfSpeech, Sentence, Sentiment, @@ -72,6 +74,8 @@ "Document", "Entity", "EntityMention", + "ModerateTextRequest", + "ModerateTextResponse", "PartOfSpeech", "Sentence", "Sentiment", diff --git a/google/cloud/language_v1/__init__.py b/google/cloud/language_v1/__init__.py index 6df3e48a..166ac220 100644 --- a/google/cloud/language_v1/__init__.py +++ b/google/cloud/language_v1/__init__.py @@ -39,6 +39,8 @@ EncodingType, Entity, EntityMention, + ModerateTextRequest, + ModerateTextResponse, PartOfSpeech, Sentence, Sentiment, @@ -68,6 +70,8 @@ "Entity", "EntityMention", "LanguageServiceClient", + "ModerateTextRequest", + "ModerateTextResponse", "PartOfSpeech", "Sentence", "Sentiment", diff --git a/google/cloud/language_v1/gapic_metadata.json b/google/cloud/language_v1/gapic_metadata.json index e475aad9..fa2c065a 100644 --- a/google/cloud/language_v1/gapic_metadata.json +++ b/google/cloud/language_v1/gapic_metadata.json @@ -39,6 +39,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } }, @@ -74,6 +79,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } }, @@ -109,6 +119,11 @@ "methods": [ "classify_text" ] + }, + "ModerateText": { + "methods": [ + "moderate_text" + ] } } } diff --git a/google/cloud/language_v1/services/language_service/async_client.py b/google/cloud/language_v1/services/language_service/async_client.py index 8389f19c..54fda3ae 100644 --- a/google/cloud/language_v1/services/language_service/async_client.py +++ b/google/cloud/language_v1/services/language_service/async_client.py @@ -792,6 +792,104 @@ async def sample_classify_text(): # Done; return the response. return response + async def moderate_text( + self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + async def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceAsyncClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = await client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.language_v1.types.ModerateTextRequest, dict]]): + The request object. The document moderation request + message. + document (:class:`google.cloud.language_v1.types.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.ModerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.moderate_text, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def annotate_text( self, request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py index d79cffc5..e9b4ccec 100644 --- a/google/cloud/language_v1/services/language_service/client.py +++ b/google/cloud/language_v1/services/language_service/client.py @@ -951,6 +951,104 @@ def sample_classify_text(): # Done; return the response. return response + def moderate_text( + self, + request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, + *, + document: Optional[language_service.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Moderates a document for harmful and sensitive + categories. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
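
Beyond the generated samples, which stop at ``print(response)``, a caller will usually inspect ``ModerateTextResponse.moderation_categories``, a repeated ``ClassificationCategory`` whose ``name`` and ``confidence`` fields are defined later in this patch. A minimal sketch (not generated output; it assumes Application Default Credentials, and the input text and 0.5 threshold are purely illustrative):

.. code-block:: python

    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    response = client.moderate_text(
        document=language_v1.Document(
            content="Text to screen.",  # illustrative input
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
    )
    # Each category carries a taxonomy name plus the classifier's confidence.
    for category in response.moderation_categories:
        if category.confidence >= 0.5:  # illustrative threshold, not from this patch
            print(f"{category.name}: {category.confidence:.2f}")
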
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import language_v1 + + def sample_moderate_text(): + # Create a client + client = language_v1.LanguageServiceClient() + + # Initialize request argument(s) + document = language_v1.Document() + document.content = "content_value" + + request = language_v1.ModerateTextRequest( + document=document, + ) + + # Make the request + response = client.moderate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.language_v1.types.ModerateTextRequest, dict]): + The request object. The document moderation request + message. + document (google.cloud.language_v1.types.Document): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.language_v1.types.ModerateTextResponse: + The document moderation response + message. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ModerateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ModerateTextRequest): + request = language_service.ModerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.moderate_text] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def annotate_text( self, request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, diff --git a/google/cloud/language_v1/services/language_service/transports/base.py b/google/cloud/language_v1/services/language_service/transports/base.py index d8bfc5d9..c900f0af 100644 --- a/google/cloud/language_v1/services/language_service/transports/base.py +++ b/google/cloud/language_v1/services/language_service/transports/base.py @@ -200,6 +200,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.moderate_text: gapic_v1.method.wrap_method( + self.moderate_text, + default_timeout=None, + client_info=client_info, + ), self.annotate_text: gapic_v1.method.wrap_method( self.annotate_text, default_retry=retries.Retry( @@ -286,6 +291,18 @@ def classify_text( ]: raise NotImplementedError() + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], + Union[ + language_service.ModerateTextResponse, + Awaitable[language_service.ModerateTextResponse], + ], + ]: + raise NotImplementedError() + @property def annotate_text( self, diff --git a/google/cloud/language_v1/services/language_service/transports/grpc.py b/google/cloud/language_v1/services/language_service/transports/grpc.py index f46b19fd..dd9abdb0 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -380,6 +380,35 @@ def classify_text( ) return self._stubs["classify_text"] + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], language_service.ModerateTextResponse + ]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + ~.ModerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "moderate_text" not in self._stubs: + self._stubs["moderate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/ModerateText", + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs["moderate_text"] + @property def annotate_text( self, diff --git a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py index dd19f8f1..00e1ea21 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -385,6 +385,36 @@ def classify_text( ) return self._stubs["classify_text"] + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], + Awaitable[language_service.ModerateTextResponse], + ]: + r"""Return a callable for the moderate text method over gRPC. + + Moderates a document for harmful and sensitive + categories. + + Returns: + Callable[[~.ModerateTextRequest], + Awaitable[~.ModerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "moderate_text" not in self._stubs: + self._stubs["moderate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/ModerateText", + request_serializer=language_service.ModerateTextRequest.serialize, + response_deserializer=language_service.ModerateTextResponse.deserialize, + ) + return self._stubs["moderate_text"] + @property def annotate_text( self, diff --git a/google/cloud/language_v1/services/language_service/transports/rest.py b/google/cloud/language_v1/services/language_service/transports/rest.py index d92e6672..990da259 100644 --- a/google/cloud/language_v1/services/language_service/transports/rest.py +++ b/google/cloud/language_v1/services/language_service/transports/rest.py @@ -111,6 +111,14 @@ def post_classify_text(self, response): logging.log(f"Received response: {response}") return response + def pre_moderate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_moderate_text(self, response): + logging.log(f"Received response: {response}") + return response + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) client = LanguageServiceClient(transport=transport) @@ -257,6 +265,29 @@ def post_classify_text( """ return response + def pre_moderate_text( + self, + request: language_service.ModerateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for moderate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_moderate_text( + self, response: language_service.ModerateTextResponse + ) -> language_service.ModerateTextResponse: + """Post-rpc interceptor for moderate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class LanguageServiceRestStub: @@ -945,6 +976,104 @@ def __call__( resp = self._interceptor.post_classify_text(resp) return resp + class _ModerateText(LanguageServiceRestStub): + def __hash__(self): + return hash("ModerateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.ModerateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ModerateTextResponse: + r"""Call the moderate text method over HTTP. + + Args: + request (~.language_service.ModerateTextRequest): + The request object. The document moderation request + message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ModerateTextResponse: + The document moderation response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:moderateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_moderate_text(request, metadata) + pb_request = language_service.ModerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ModerateTextResponse() + pb_resp = language_service.ModerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_moderate_text(resp) + return resp + @property def analyze_entities( self, @@ -1008,6 +1137,16 @@ def classify_text( # In C++ this would require a dynamic_cast return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + @property + def moderate_text( + self, + ) -> Callable[ + [language_service.ModerateTextRequest], language_service.ModerateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/google/cloud/language_v1/types/__init__.py b/google/cloud/language_v1/types/__init__.py index b4b20c9c..4f98d4d3 100644 --- a/google/cloud/language_v1/types/__init__.py +++ b/google/cloud/language_v1/types/__init__.py @@ -33,6 +33,8 @@ EncodingType, Entity, EntityMention, + ModerateTextRequest, + ModerateTextResponse, PartOfSpeech, Sentence, Sentiment, @@ -59,6 +61,8 @@ "Document", "Entity", "EntityMention", + "ModerateTextRequest", + "ModerateTextResponse", "PartOfSpeech", "Sentence", "Sentiment", diff --git a/google/cloud/language_v1/types/language_service.py b/google/cloud/language_v1/types/language_service.py index bd330ffe..da423978 100644 --- a/google/cloud/language_v1/types/language_service.py +++ b/google/cloud/language_v1/types/language_service.py @@ -44,6 +44,8 @@ "AnalyzeSyntaxResponse", "ClassifyTextRequest", "ClassifyTextResponse", + "ModerateTextRequest", + "ModerateTextResponse", "AnnotateTextRequest", "AnnotateTextResponse", }, @@ -1194,9 +1196,8 @@ class ClassificationCategory(proto.Message): Attributes: name (str): - The name of the category representing the document, from the - `predefined - taxonomy `__. 
+ The name of the category representing the + document. confidence (float): The classifier's confidence of the category. Number represents how certain the classifier is @@ -1536,6 +1537,39 @@ class ClassifyTextResponse(proto.Message): ) +class ModerateTextRequest(proto.Message): + r"""The document moderation request message. + + Attributes: + document (google.cloud.language_v1.types.Document): + Required. Input document. + """ + + document: "Document" = proto.Field( + proto.MESSAGE, + number=1, + message="Document", + ) + + +class ModerateTextResponse(proto.Message): + r"""The document moderation response message. + + Attributes: + moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Harmful and sensitive categories representing + the input document. + """ + + moderation_categories: MutableSequence[ + "ClassificationCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ClassificationCategory", + ) + + class AnnotateTextRequest(proto.Message): r"""The request message for the text annotation API, which can perform multiple analysis types (sentiment, entities, and @@ -1568,6 +1602,9 @@ class Features(proto.Message): sentiment. classify_text (bool): Classify the full document into categories. + moderate_text (bool): + Moderate the document for harmful and + sensitive categories. classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): The model options to use for classification. Defaults to v1 options if not specified. Only used if ``classify_text`` is @@ -1594,6 +1631,10 @@ class Features(proto.Message): proto.BOOL, number=6, ) + moderate_text: bool = proto.Field( + proto.BOOL, + number=11, + ) classification_model_options: "ClassificationModelOptions" = proto.Field( proto.MESSAGE, number=10, @@ -1645,6 +1686,9 @@ class AnnotateTextResponse(proto.Message): field for more details. categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): Categories identified in the input document. + moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): + Harmful and sensitive categories identified + in the input document. """ sentences: MutableSequence["Sentence"] = proto.RepeatedField( @@ -1676,6 +1720,13 @@ class AnnotateTextResponse(proto.Message): number=6, message="ClassificationCategory", ) + moderation_categories: MutableSequence[ + "ClassificationCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="ClassificationCategory", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index c1f51536..00000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/language/__init__.py - google/cloud/language/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
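
With the ``Features.moderate_text`` flag added above (proto field 11), moderation can also ride along with a single ``annotate_text`` call, landing in the new ``AnnotateTextResponse.moderation_categories`` field. A hedged sketch combining it with classification (field names are taken from this patch; the input text is illustrative):

.. code-block:: python

    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    response = client.annotate_text(
        request=language_v1.AnnotateTextRequest(
            document=language_v1.Document(
                content="Text to analyze and screen.",  # illustrative input
                type_=language_v1.Document.Type.PLAIN_TEXT,
            ),
            features=language_v1.AnnotateTextRequest.Features(
                classify_text=True,
                moderate_text=True,
            ),
        )
    )
    print(response.categories)             # classification results
    print(response.moderation_categories)  # moderation results (field 7)
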
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index e0f21a43..00000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/language *.py -recursive-include google/cloud/language_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 0c5f1b6b..00000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Language API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Language API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 2e1b322d..00000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-language documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-language" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. 
-# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-language-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-language.tex", - u"google-cloud-language Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-language", - u"Google Cloud Language Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-language", - u"google-cloud-language Documentation", - author, - "google-cloud-language", - "GAPIC library for Google Cloud Language API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 90928956..00000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - language_v1/services - language_v1/types diff --git a/owl-bot-staging/v1/docs/language_v1/language_service.rst b/owl-bot-staging/v1/docs/language_v1/language_service.rst deleted file mode 100644 index 96e8755a..00000000 --- a/owl-bot-staging/v1/docs/language_v1/language_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -LanguageService ---------------------------------- - -.. automodule:: google.cloud.language_v1.services.language_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/language_v1/services.rst b/owl-bot-staging/v1/docs/language_v1/services.rst deleted file mode 100644 index 26f74fe9..00000000 --- a/owl-bot-staging/v1/docs/language_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Language v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - language_service diff --git a/owl-bot-staging/v1/docs/language_v1/types.rst b/owl-bot-staging/v1/docs/language_v1/types.rst deleted file mode 100644 index 5dd3769e..00000000 --- a/owl-bot-staging/v1/docs/language_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Language v1 API -====================================== - -.. automodule:: google.cloud.language_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/language/__init__.py b/owl-bot-staging/v1/google/cloud/language/__init__.py deleted file mode 100644 index f65e8909..00000000 --- a/owl-bot-staging/v1/google/cloud/language/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.language import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.language_v1.services.language_service.client import LanguageServiceClient -from google.cloud.language_v1.services.language_service.async_client import LanguageServiceAsyncClient - -from google.cloud.language_v1.types.language_service import AnalyzeEntitiesRequest -from google.cloud.language_v1.types.language_service import AnalyzeEntitiesResponse -from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentRequest -from google.cloud.language_v1.types.language_service import AnalyzeEntitySentimentResponse -from google.cloud.language_v1.types.language_service import AnalyzeSentimentRequest -from google.cloud.language_v1.types.language_service import AnalyzeSentimentResponse -from google.cloud.language_v1.types.language_service import AnalyzeSyntaxRequest -from google.cloud.language_v1.types.language_service import AnalyzeSyntaxResponse -from google.cloud.language_v1.types.language_service import AnnotateTextRequest -from google.cloud.language_v1.types.language_service import AnnotateTextResponse -from google.cloud.language_v1.types.language_service import ClassificationCategory -from google.cloud.language_v1.types.language_service import ClassificationModelOptions -from google.cloud.language_v1.types.language_service import ClassifyTextRequest -from google.cloud.language_v1.types.language_service import ClassifyTextResponse -from google.cloud.language_v1.types.language_service import DependencyEdge -from google.cloud.language_v1.types.language_service import Document -from google.cloud.language_v1.types.language_service import Entity -from google.cloud.language_v1.types.language_service import EntityMention -from google.cloud.language_v1.types.language_service import ModerateTextRequest -from google.cloud.language_v1.types.language_service import ModerateTextResponse -from google.cloud.language_v1.types.language_service import PartOfSpeech -from google.cloud.language_v1.types.language_service import Sentence -from google.cloud.language_v1.types.language_service import Sentiment -from google.cloud.language_v1.types.language_service import TextSpan -from google.cloud.language_v1.types.language_service import Token -from google.cloud.language_v1.types.language_service import EncodingType - -__all__ = ('LanguageServiceClient', - 'LanguageServiceAsyncClient', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1/google/cloud/language/gapic_version.py 
b/owl-bot-staging/v1/google/cloud/language/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1/google/cloud/language/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language/py.typed b/owl-bot-staging/v1/google/cloud/language/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1/google/cloud/language/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/__init__.py deleted file mode 100644 index 98a10950..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from google.cloud.language_v1 import gapic_version as package_version
-
-__version__ = package_version.__version__
-
-
-from .services.language_service import LanguageServiceClient
-from .services.language_service import LanguageServiceAsyncClient
-
-from .types.language_service import AnalyzeEntitiesRequest
-from .types.language_service import AnalyzeEntitiesResponse
-from .types.language_service import AnalyzeEntitySentimentRequest
-from .types.language_service import AnalyzeEntitySentimentResponse
-from .types.language_service import AnalyzeSentimentRequest
-from .types.language_service import AnalyzeSentimentResponse
-from .types.language_service import AnalyzeSyntaxRequest
-from .types.language_service import AnalyzeSyntaxResponse
-from .types.language_service import AnnotateTextRequest
-from .types.language_service import AnnotateTextResponse
-from .types.language_service import ClassificationCategory
-from .types.language_service import ClassificationModelOptions
-from .types.language_service import ClassifyTextRequest
-from .types.language_service import ClassifyTextResponse
-from .types.language_service import DependencyEdge
-from .types.language_service import Document
-from .types.language_service import Entity
-from .types.language_service import EntityMention
-from .types.language_service import ModerateTextRequest
-from .types.language_service import ModerateTextResponse
-from .types.language_service import PartOfSpeech
-from .types.language_service import Sentence
-from .types.language_service import Sentiment
-from .types.language_service import TextSpan
-from .types.language_service import Token
-from .types.language_service import EncodingType
-
-__all__ = (
-    'LanguageServiceAsyncClient',
-'AnalyzeEntitiesRequest',
-'AnalyzeEntitiesResponse',
-'AnalyzeEntitySentimentRequest',
-'AnalyzeEntitySentimentResponse',
-'AnalyzeSentimentRequest',
-'AnalyzeSentimentResponse',
-'AnalyzeSyntaxRequest',
-'AnalyzeSyntaxResponse',
-'AnnotateTextRequest',
-'AnnotateTextResponse',
-'ClassificationCategory',
-'ClassificationModelOptions',
-'ClassifyTextRequest',
-'ClassifyTextResponse',
-'DependencyEdge',
-'Document',
-'EncodingType',
-'Entity',
-'EntityMention',
-'LanguageServiceClient',
-'ModerateTextRequest',
-'ModerateTextResponse',
-'PartOfSpeech',
-'Sentence',
-'Sentiment',
-'TextSpan',
-'Token',
-)
diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json
deleted file mode 100644
index fa2c065a..00000000
--- a/owl-bot-staging/v1/google/cloud/language_v1/gapic_metadata.json
+++ /dev/null
@@ -1,133 +0,0 @@
- {
-  "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
-  "language": "python",
-  "libraryPackage": "google.cloud.language_v1",
-  "protoPackage": "google.cloud.language.v1",
-  "schema": "1.0",
-  "services": {
-    "LanguageService": {
-      "clients": {
-        "grpc": {
-          "libraryClient": "LanguageServiceClient",
-          "rpcs": {
-            "AnalyzeEntities": {
-              "methods": [
-                "analyze_entities"
-              ]
-            },
-            "AnalyzeEntitySentiment": {
-              "methods": [
-                "analyze_entity_sentiment"
-              ]
-            },
-            "AnalyzeSentiment": {
-              "methods": [
-                "analyze_sentiment"
-              ]
-            },
-            "AnalyzeSyntax": {
-              "methods": [
-                "analyze_syntax"
-              ]
-            },
-            "AnnotateText": {
-              "methods": [
-                "annotate_text"
-              ]
-            },
-            "ClassifyText": {
-              "methods": [
-                "classify_text"
-              ]
-            },
-            "ModerateText": {
-              "methods": [
-                "moderate_text"
-              ]
-            }
-          }
-        },
-        "grpc-async": {
-          "libraryClient": "LanguageServiceAsyncClient",
"rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - }, - "rest": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/language_v1/py.typed b/owl-bot-staging/v1/google/cloud/language_v1/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py
deleted file mode 100644
index 6e5f9052..00000000
--- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from .client import LanguageServiceClient
-from .async_client import LanguageServiceAsyncClient
-
-__all__ = (
-    'LanguageServiceClient',
-    'LanguageServiceAsyncClient',
-)
diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py
deleted file mode 100644
index cbd88917..00000000
--- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/async_client.py
+++ /dev/null
@@ -1,962 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-import functools
-import re
-from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
-
-from google.cloud.language_v1 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object]  # type: ignore
-
-from google.cloud.language_v1.types import language_service
-from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
-from .client import LanguageServiceClient
-
-
-class LanguageServiceAsyncClient:
-    """Provides text analysis operations such as sentiment analysis
-    and entity recognition.
- """ - - _client: LanguageServiceClient - - DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) - common_project_path = staticmethod(LanguageServiceClient.common_project_path) - parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) - common_location_path = staticmethod(LanguageServiceClient.common_location_path) - parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> LanguageServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            LanguageServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, LanguageServiceTransport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the language service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.LanguageServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = LanguageServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def analyze_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSentimentResponse:
-        r"""Analyzes the sentiment of the provided text.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_sentiment():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeSentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]]):
-                The request object. The sentiment analysis request
-                message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate sentence offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeSentimentResponse:
-                The sentiment analysis response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeSentimentRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_sentiment,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
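[Editor's note: as the guard above enforces, callers pass either a full request object or the flattened `document`/`encoding_type` arguments, never both. A brief hedged sketch of the flattened form; the text content is illustrative:]

    from google.cloud import language_v1

    async def sample_flattened():
        client = language_v1.LanguageServiceAsyncClient()
        document = language_v1.Document(
            content="I love this product.",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        # Flattened arguments; combining these with request= raises ValueError.
        response = await client.analyze_sentiment(
            document=document,
            encoding_type=language_v1.EncodingType.UTF8,
        )
        print(response.document_sentiment.score)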
-    async def analyze_entities(self,
-            request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitiesResponse:
-        r"""Finds named entities (currently proper names and
-        common nouns) in the text along with entity types,
-        salience, mentions for each entity, and other
-        properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_entities():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeEntitiesRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_entities(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]]):
-                The request object. The entity analysis request message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeEntitiesResponse:
-                The entity analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeEntitiesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entities,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_entity_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeEntitySentimentResponse:
-        r"""Finds entities, similar to
-        [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities]
-        in the text and analyzes sentiment associated with each entity
-        and its mentions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_entity_sentiment():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeEntitySentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_entity_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]]):
-                The request object. The entity-level sentiment analysis
-                request message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeEntitySentimentResponse:
-                The entity-level sentiment analysis
-                response message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeEntitySentimentRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_entity_sentiment,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def analyze_syntax(self,
-            request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSyntaxResponse:
-        r"""Analyzes the syntax of the text and provides sentence
-        boundaries and tokenization along with part of speech
-        tags, dependency trees, and other properties.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_analyze_syntax():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeSyntaxRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.analyze_syntax(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]]):
-                The request object. The syntax analysis request message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnalyzeSyntaxResponse:
-                The syntax analysis response message.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnalyzeSyntaxRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.analyze_syntax,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def classify_text(self,
-            request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.ClassifyTextResponse:
-        r"""Classifies a document into categories.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_classify_text():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.ClassifyTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.classify_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]]):
-                The request object. The document classification request
-                message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.ClassifyTextResponse:
-                The document classification response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.ClassifyTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.classify_text,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def moderate_text(self,
-            request: Optional[Union[language_service.ModerateTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.ModerateTextResponse:
-        r"""Moderates a document for harmful and sensitive
-        categories.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_moderate_text():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.ModerateTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.moderate_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.ModerateTextRequest, dict]]):
-                The request object. The document moderation request
-                message.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.ModerateTextResponse:
-                The document moderation response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.ModerateTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.moderate_text,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def annotate_text(self,
-            request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            features: Optional[language_service.AnnotateTextRequest.Features] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnnotateTextResponse:
-        r"""A convenience method that provides all the features
-        that analyzeSentiment, analyzeEntities, and
-        analyzeSyntax provide in one call.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            async def sample_annotate_text():
-                # Create a client
-                client = language_v1.LanguageServiceAsyncClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnnotateTextRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = await client.annotate_text(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]]):
-                The request object. The request message for the text
-                annotation API, which can perform
-                multiple analysis types (sentiment,
-                entities, and syntax) in one call.
-            document (:class:`google.cloud.language_v1.types.Document`):
-                Required. Input document.
-                This corresponds to the ``document`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            features (:class:`google.cloud.language_v1.types.AnnotateTextRequest.Features`):
-                Required. The enabled features.
-                This corresponds to the ``features`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            encoding_type (:class:`google.cloud.language_v1.types.EncodingType`):
-                The encoding type used by the API to
-                calculate offsets.
-
-                This corresponds to the ``encoding_type`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.language_v1.types.AnnotateTextResponse:
-                The text annotations response
-                message.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([document, features, encoding_type])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = language_service.AnnotateTextRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if document is not None:
-            request.document = document
-        if features is not None:
-            request.features = features
-        if encoding_type is not None:
-            request.encoding_type = encoding_type
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.annotate_text,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self):
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "LanguageServiceAsyncClient",
-)
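[Editor's note: the `__aenter__`/`__aexit__` pair above makes the async client usable as an async context manager that closes its transport on exit. A minimal hedged sketch; the content value is illustrative:]

    import asyncio

    from google.cloud import language_v1

    async def main():
        # The transport channel is closed automatically when the block exits.
        async with language_v1.LanguageServiceAsyncClient() as client:
            document = language_v1.Document(
                content="content_value",
                type_=language_v1.Document.Type.PLAIN_TEXT,
            )
            response = await client.classify_text(document=document)
            print(response)

    asyncio.run(main())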
diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py
deleted file mode 100644
index 0a25db80..00000000
--- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/client.py
+++ /dev/null
@@ -1,1115 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-
-from google.cloud.language_v1 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object]  # type: ignore
-
-from google.cloud.language_v1.types import language_service
-from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import LanguageServiceGrpcTransport
-from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
-from .transports.rest import LanguageServiceRestTransport
-
-
-class LanguageServiceClientMeta(type):
-    """Metaclass for the LanguageService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
-    _transport_registry["grpc"] = LanguageServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = LanguageServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[LanguageServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
-    """Provides text analysis operations such as sentiment analysis
-    and entity recognition.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "language.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            LanguageServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            LanguageServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> LanguageServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            LanguageServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
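[Editor's note: per the resolution order implemented above, an explicit `api_endpoint` always wins over the environment variables. A hedged sketch; the endpoint value is illustrative:]

    import os

    from google.api_core.client_options import ClientOptions
    from google.cloud import language_v1

    # Would normally force the regular endpoint, but the explicit option wins.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    endpoint, cert_source = (
        language_v1.LanguageServiceClient.get_mtls_endpoint_and_cert_source(
            ClientOptions(api_endpoint="language.example.googleapis.com")
        )
    )
    assert endpoint == "language.example.googleapis.com"
    assert cert_source is None  # GOOGLE_API_USE_CLIENT_CERTIFICATE defaults to "false"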
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, LanguageServiceTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the language service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, LanguageServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        if isinstance(client_options, dict):
-            client_options = client_options_lib.from_dict(client_options)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        client_options = cast(client_options_lib.ClientOptions, client_options)
-
-        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)
-
-        api_key_value = getattr(client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        if isinstance(transport, LanguageServiceTransport):
-            # transport is a LanguageServiceTransport instance.
-            if credentials or client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
-                )
-            self._transport = transport
-        else:
-            import google.auth._default  # type: ignore
-
-            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
-                credentials = google.auth._default.get_api_key_credentials(api_key_value)
-
-            Transport = type(self).get_transport_class(transport)
-            self._transport = Transport(
-                credentials=credentials,
-                credentials_file=client_options.credentials_file,
-                host=api_endpoint,
-                scopes=client_options.scopes,
-                client_cert_source_for_mtls=client_cert_source_func,
-                quota_project_id=client_options.quota_project_id,
-                client_info=client_info,
-                always_use_jwt_access=True,
-                api_audience=client_options.api_audience,
-            )
-
-    def analyze_sentiment(self,
-            request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None,
-            *,
-            document: Optional[language_service.Document] = None,
-            encoding_type: Optional[language_service.EncodingType] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> language_service.AnalyzeSentimentResponse:
-        r"""Analyzes the sentiment of the provided text.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import language_v1
-
-            def sample_analyze_sentiment():
-                # Create a client
-                client = language_v1.LanguageServiceClient()
-
-                # Initialize request argument(s)
-                document = language_v1.Document()
-                document.content = "content_value"
-
-                request = language_v1.AnalyzeSentimentRequest(
-                    document=document,
-                )
-
-                # Make the request
-                response = client.analyze_sentiment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]):
-                The request object. The sentiment analysis request
-                message.
- document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeSentimentRequest): - request = language_service.AnalyzeSentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_entities(self, - request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]): - The request object. The entity analysis request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitiesRequest): - request = language_service.AnalyzeEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entities] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def analyze_entity_sentiment(self, - request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]): - The request object. The entity-level sentiment analysis - request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitySentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): - request = language_service.AnalyzeEntitySentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
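The same request-versus-flattened-arguments guard appears in every method in this client. A hedged sketch of both calling styles (illustrative only; a real call assumes default credentials and a reachable service):

.. code-block:: python

    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="The weather is great today.",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )

    # Equivalent: a request object (or dict) ...
    client.analyze_entity_sentiment(request={"document": document})
    # ... or the flattened fields.
    client.analyze_entity_sentiment(document=document)

    # Mixing both styles raises ValueError before any RPC is sent.
    try:
        client.analyze_entity_sentiment(
            request={"document": document}, document=document
        )
    except ValueError:
        pass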
- if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_syntax(self, - request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]): - The request object. The syntax analysis request message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSyntaxRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, language_service.AnalyzeSyntaxRequest): - request = language_service.AnalyzeSyntaxRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def classify_text(self, - request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ClassifyTextResponse: - r"""Classifies a document into categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]): - The request object. The document classification request - message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.ClassifyTextResponse: - The document classification response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ClassifyTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.ClassifyTextRequest): - request = language_service.ClassifyTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.classify_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def moderate_text(self, - request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ModerateTextResponse: - r"""Moderates a document for harmful and sensitive - categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_moderate_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ModerateTextRequest( - document=document, - ) - - # Make the request - response = client.moderate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.ModerateTextRequest, dict]): - The request object. The document moderation request - message. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.ModerateTextResponse: - The document moderation response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ModerateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.ModerateTextRequest): - request = language_service.ModerateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.moderate_text] - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def annotate_text(self, - request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - features: Optional[language_service.AnnotateTextRequest.Features] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnnotateTextResponse: - r"""A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1 - - def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - document (google.cloud.language_v1.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - features (google.cloud.language_v1.types.AnnotateTextRequest.Features): - Required. The enabled features. - This corresponds to the ``features`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1.types.AnnotateTextResponse: - The text annotations response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, features, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnnotateTextRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnnotateTextRequest): - request = language_service.AnnotateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if features is not None: - request.features = features - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.annotate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "LanguageServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "LanguageServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py deleted file mode 100644 index 3cb6ab92..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LanguageServiceTransport -from .grpc import LanguageServiceGrpcTransport -from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .rest import LanguageServiceRestTransport -from .rest import LanguageServiceRestInterceptor - - -# Compile a registry of transports. 
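Two behaviors from the code above in one hedged sketch (illustrative only, assuming default credentials): the client works as a context manager that closes its transport on exit, and string transport names such as ``"rest"`` are resolved through the registry defined just below.

.. code-block:: python

    from google.cloud import language_v1

    # ``transport="rest"`` is looked up in the transport registry; the
    # ``with`` block closes the underlying transport on exit, so do not
    # share that transport with other clients.
    with language_v1.LanguageServiceClient(transport="rest") as client:
        document = language_v1.Document(
            content="Hello, world!",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        response = client.analyze_sentiment(document=document)
        print(response.document_sentiment.score)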
-_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] -_transport_registry['grpc'] = LanguageServiceGrpcTransport -_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport -_transport_registry['rest'] = LanguageServiceRestTransport - -__all__ = ( - 'LanguageServiceTransport', - 'LanguageServiceGrpcTransport', - 'LanguageServiceGrpcAsyncIOTransport', - 'LanguageServiceRestTransport', - 'LanguageServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py deleted file mode 100644 index 5b45658f..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/base.py +++ /dev/null @@ -1,275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.language_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.language_v1.types import language_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class LanguageServiceTransport(abc.ABC): - """Abstract transport class for LanguageService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'language.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the GDCH audience if the credentials were supplied by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
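A hedged sketch of the credential checks above (illustrative only; the key-file path is hypothetical, the ``object()`` credentials are a stand-in, and the happy path assumes Application Default Credentials):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud.language_v1.services.language_service.transports import (
        LanguageServiceGrpcTransport,
    )

    # ``credentials`` and ``credentials_file`` are mutually exclusive.
    try:
        LanguageServiceGrpcTransport(
            credentials=object(),  # stand-in for real credentials
            credentials_file="/path/to/service-account.json",  # hypothetical
        )
    except core_exceptions.DuplicateCredentialArgs as exc:
        print(exc)

    # With neither argument, credentials are resolved from the environment
    # and the default port is appended to the host.
    transport = LanguageServiceGrpcTransport()
    # ``_host`` is a private attribute, inspected here for illustration only.
    assert transport._host == "language.googleapis.com:443"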
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.analyze_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entities: gapic_v1.method.wrap_method(
-                self.analyze_entities,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_entity_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_syntax: gapic_v1.method.wrap_method(
-                self.analyze_syntax,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.classify_text: gapic_v1.method.wrap_method(
-                self.classify_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.moderate_text: gapic_v1.method.wrap_method(
-                self.moderate_text,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.annotate_text: gapic_v1.method.wrap_method(
-                self.annotate_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-            Only call this method if the transport is NOT shared
-            with other clients - this may cause errors in other clients!
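The retry policy wired in above (exponential backoff from 0.1s to 60s with multiplier 1.3, retrying ``DeadlineExceeded`` and ``ServiceUnavailable``, 600s deadline) can be overridden per call. A hedged sketch, illustrative only, assuming default credentials; the override values are arbitrary:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="Hello, world!",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )

    # Override the wrapped defaults for a single call.
    response = client.analyze_sentiment(
        document=document,
        retry=retries.Retry(
            initial=0.5,
            maximum=30.0,
            multiplier=2.0,
            deadline=120.0,
            predicate=retries.if_exception_type(
                core_exceptions.ServiceUnavailable,
            ),
        ),
        timeout=120.0,
    )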
- """ - raise NotImplementedError() - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Union[ - language_service.AnalyzeSentimentResponse, - Awaitable[language_service.AnalyzeSentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Union[ - language_service.AnalyzeEntitiesResponse, - Awaitable[language_service.AnalyzeEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Union[ - language_service.AnalyzeEntitySentimentResponse, - Awaitable[language_service.AnalyzeEntitySentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Union[ - language_service.AnalyzeSyntaxResponse, - Awaitable[language_service.AnalyzeSyntaxResponse] - ]]: - raise NotImplementedError() - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Union[ - language_service.ClassifyTextResponse, - Awaitable[language_service.ClassifyTextResponse] - ]]: - raise NotImplementedError() - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Union[ - language_service.ModerateTextResponse, - Awaitable[language_service.ModerateTextResponse] - ]]: - raise NotImplementedError() - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Union[ - language_service.AnnotateTextResponse, - Awaitable[language_service.AnnotateTextResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'LanguageServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py deleted file mode 100644 index 5cb5acd2..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.language_v1.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO - - -class LanguageServiceGrpcTransport(LanguageServiceTransport): - """gRPC backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. 
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
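A hedged sketch of the channel plumbing described above: build a channel with ``create_channel`` and hand it to the transport, in which case the transport's credential arguments are ignored. Illustrative only; assumes Application Default Credentials.

.. code-block:: python

    from google.cloud import language_v1
    from google.cloud.language_v1.services.language_service.transports import (
        LanguageServiceGrpcTransport,
    )

    channel = LanguageServiceGrpcTransport.create_channel(
        "language.googleapis.com",
        scopes=LanguageServiceGrpcTransport.AUTH_SCOPES,
    )
    transport = LanguageServiceGrpcTransport(channel=channel)
    client = language_v1.LanguageServiceClient(transport=transport)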
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - ~.AnalyzeSentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - ~.AnalyzeEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - ~.AnalyzeEntitySentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - ~.AnalyzeSyntaxResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - ~.ClassifyTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - ~.ModerateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - Returns: - Callable[[~.AnnotateTextRequest], - ~.AnnotateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'LanguageServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py deleted file mode 100644 index 8839befb..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.language_v1.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import LanguageServiceGrpcTransport - - -class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): - """gRPC AsyncIO backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
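This AsyncIO transport is what backs ``LanguageServiceAsyncClient``. A hedged usage sketch, illustrative only, assuming default credentials:

.. code-block:: python

    import asyncio

    from google.cloud import language_v1


    async def main() -> None:
        # The async client selects the grpc_asyncio transport by default.
        client = language_v1.LanguageServiceAsyncClient()
        document = language_v1.Document(
            content="Hello, world!",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        response = await client.analyze_sentiment(document=document)
        print(response.document_sentiment.score)


    asyncio.run(main())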
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Awaitable[language_service.AnalyzeSentimentResponse]]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - Awaitable[~.AnalyzeSentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Awaitable[language_service.AnalyzeEntitiesResponse]]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - Awaitable[~.AnalyzeEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Awaitable[language_service.AnalyzeEntitySentimentResponse]]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - Awaitable[~.AnalyzeEntitySentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Awaitable[language_service.AnalyzeSyntaxResponse]]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - Awaitable[~.AnalyzeSyntaxResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Awaitable[language_service.ClassifyTextResponse]]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - Awaitable[~.ClassifyTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Awaitable[language_service.ModerateTextResponse]]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - Awaitable[~.ModerateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Awaitable[language_service.AnnotateTextResponse]]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all the features - that analyzeSentiment, analyzeEntities, and - analyzeSyntax provide in one call. - - Returns: - Callable[[~.AnnotateTextRequest], - Awaitable[~.AnnotateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'LanguageServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py deleted file mode 100644 index eb368cc9..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/services/language_service/transports/rest.py +++ /dev/null @@ -1,1029 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
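The AsyncIO transport above is not normally constructed by hand; it is created for you by LanguageServiceAsyncClient, and each property lazily builds and caches a unary-unary stub the first time its RPC is used. A minimal usage sketch (assuming Application Default Credentials are configured in the environment; the sample text and printed field are illustrative only):

import asyncio

from google.cloud import language_v1


async def main() -> None:
    # Builds a LanguageServiceGrpcAsyncIOTransport under the hood.
    client = language_v1.LanguageServiceAsyncClient()

    document = language_v1.Document(
        content="Hello, world!",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )

    # Goes through the cached 'analyze_sentiment' stub defined above.
    response = await client.analyze_sentiment(request={"document": document})
    print(response.document_sentiment.score)


if __name__ == "__main__":
    asyncio.run(main())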
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.language_v1.types import language_service - -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LanguageServiceRestInterceptor: - """Interceptor for LanguageService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LanguageServiceRestTransport. - - .. 
code-block:: python - class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): - def pre_analyze_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_entity_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entity_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_syntax(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_syntax(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_annotate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_annotate_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_classify_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_classify_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_moderate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_moderate_text(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) - client = LanguageServiceClient(transport=transport) - - - """ - def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: - """Post-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: - """Post-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. 
- """ - return response - def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: - """Post-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: - """Post-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for annotate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: - """Post-rpc interceptor for annotate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for classify_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: - """Post-rpc interceptor for classify_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for moderate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. 
- """ - return request, metadata - - def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: - """Post-rpc interceptor for moderate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LanguageServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LanguageServiceRestInterceptor - - -class LanguageServiceRestTransport(LanguageServiceTransport): - """REST backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LanguageServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LanguageServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _AnalyzeEntities(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Call the analyze entities method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitiesRequest): - The request object. The entity analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeEntities', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entities(request, metadata) - pb_request = language_service.AnalyzeEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitiesResponse() - pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entities(resp) - return resp - - class _AnalyzeEntitySentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntitySentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitySentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Call the analyze entity sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitySentimentRequest): - The request object. The entity-level sentiment analysis - request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeEntitySentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) - pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitySentimentResponse() - pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entity_sentiment(resp) - return resp - - class _AnalyzeSentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSentimentResponse: - r"""Call the analyze sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeSentimentRequest): - The request object. The sentiment analysis request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeSentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) - pb_request = language_service.AnalyzeSentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSentimentResponse() - pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_sentiment(resp) - return resp - - class _AnalyzeSyntax(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSyntax") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSyntaxRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Call the analyze syntax method over HTTP. - - Args: - request (~.language_service.AnalyzeSyntaxRequest): - The request object. The syntax analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:analyzeSyntax', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) - pb_request = language_service.AnalyzeSyntaxRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSyntaxResponse() - pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_syntax(resp) - return resp - - class _AnnotateText(LanguageServiceRestStub): - def __hash__(self): - return hash("AnnotateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnnotateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnnotateTextResponse: - r"""Call the annotate text method over HTTP. - - Args: - request (~.language_service.AnnotateTextRequest): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnnotateTextResponse: - The text annotations response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:annotateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_annotate_text(request, metadata) - pb_request = language_service.AnnotateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnnotateTextResponse() - pb_resp = language_service.AnnotateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_annotate_text(resp) - return resp - - class _ClassifyText(LanguageServiceRestStub): - def __hash__(self): - return hash("ClassifyText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ClassifyTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ClassifyTextResponse: - r"""Call the classify text method over HTTP. - - Args: - request (~.language_service.ClassifyTextRequest): - The request object. The document classification request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ClassifyTextResponse: - The document classification response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:classifyText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_classify_text(request, metadata) - pb_request = language_service.ClassifyTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ClassifyTextResponse() - pb_resp = language_service.ClassifyTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_classify_text(resp) - return resp - - class _ModerateText(LanguageServiceRestStub): - def __hash__(self): - return hash("ModerateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ModerateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ModerateTextResponse: - r"""Call the moderate text method over HTTP. - - Args: - request (~.language_service.ModerateTextRequest): - The request object. The document moderation request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ModerateTextResponse: - The document moderation response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/documents:moderateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_moderate_text(request, metadata) - pb_request = language_service.ModerateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ModerateTextResponse() - pb_resp = language_service.ModerateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_moderate_text(resp) - return resp - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LanguageServiceRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py deleted file mode 100644 index 8dadfa8a..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/types/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
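The interceptor hooks defined in rest.py above take effect when an interceptor instance is passed to the REST transport, mirroring the pattern in that class's docstring. A short sketch (LoggingInterceptor is a name chosen here for illustration, and the moderation_categories field is assumed to match the generated ModerateTextResponse type):

import logging

from google.cloud import language_v1
from google.cloud.language_v1.services.language_service.transports.rest import (
    LanguageServiceRestInterceptor,
    LanguageServiceRestTransport,
)


class LoggingInterceptor(LanguageServiceRestInterceptor):
    def pre_moderate_text(self, request, metadata):
        # Runs before the HTTP request is sent; may rewrite request/metadata.
        logging.info("ModerateText request: %s", request)
        return request, metadata

    def post_moderate_text(self, response):
        # Runs after the response is parsed, before it reaches user code.
        # 'moderation_categories' is assumed from the generated types.
        logging.info("ModerateText returned %d categories",
                     len(response.moderation_categories))
        return response


transport = LanguageServiceRestTransport(interceptor=LoggingInterceptor())
client = language_v1.LanguageServiceClient(transport=transport)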
-# -from .language_service import ( - AnalyzeEntitiesRequest, - AnalyzeEntitiesResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeSyntaxRequest, - AnalyzeSyntaxResponse, - AnnotateTextRequest, - AnnotateTextResponse, - ClassificationCategory, - ClassificationModelOptions, - ClassifyTextRequest, - ClassifyTextResponse, - DependencyEdge, - Document, - Entity, - EntityMention, - ModerateTextRequest, - ModerateTextResponse, - PartOfSpeech, - Sentence, - Sentiment, - TextSpan, - Token, - EncodingType, -) - -__all__ = ( - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py b/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py deleted file mode 100644 index 56d1d3ed..00000000 --- a/owl-bot-staging/v1/google/cloud/language_v1/types/language_service.py +++ /dev/null @@ -1,1724 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.language.v1', - manifest={ - 'EncodingType', - 'Document', - 'Sentence', - 'Entity', - 'Token', - 'Sentiment', - 'PartOfSpeech', - 'DependencyEdge', - 'EntityMention', - 'TextSpan', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - }, -) - - -class EncodingType(proto.Enum): - r"""Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens - and mentions, and languages that natively use different text - encodings may access offsets differently. - - Values: - NONE (0): - If ``EncodingType`` is not specified, encoding-dependent - information (such as ``begin_offset``) will be set at - ``-1``. 
- UTF8 (1): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-8 encoding of the input. C++ and - Go are examples of languages that use this encoding - natively. - UTF16 (2): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-16 encoding of the input. Java - and JavaScript are examples of languages that use this - encoding natively. - UTF32 (3): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-32 encoding of the input. Python - is an example of a language that uses this encoding - natively. - """ - NONE = 0 - UTF8 = 1 - UTF16 = 2 - UTF32 = 3 - - -class Document(proto.Message): - r"""Represents the input to API methods. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.language_v1.types.Document.Type): - Required. If the type is not set or is ``TYPE_UNSPECIFIED``, - returns an ``INVALID_ARGUMENT`` error. - content (str): - The content of the input in string format. - Cloud audit logging exempt since it is based on - user data. - - This field is a member of `oneof`_ ``source``. - gcs_content_uri (str): - The Google Cloud Storage URI where the file content is - located. This URI must be of the form: - gs://bucket_name/object_name. For more details, see - https://cloud.google.com/storage/docs/reference-uris. NOTE: - Cloud Storage object versioning is not supported. - - This field is a member of `oneof`_ ``source``. - language (str): - The language of the document (if not specified, the language - is automatically detected). Both ISO and BCP-47 language - codes are accepted. `Language - Support `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or - automatically detected) is not supported by the called API - method, an ``INVALID_ARGUMENT`` error is returned. - """ - class Type(proto.Enum): - r"""The document types enum. - - Values: - TYPE_UNSPECIFIED (0): - The content type is not specified. - PLAIN_TEXT (1): - Plain text - HTML (2): - HTML - """ - TYPE_UNSPECIFIED = 0 - PLAIN_TEXT = 1 - HTML = 2 - - type_: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - content: str = proto.Field( - proto.STRING, - number=2, - oneof='source', - ) - gcs_content_uri: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - language: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Sentence(proto.Message): - r"""Represents a sentence in the input document. - - Attributes: - text (google.cloud.language_v1.types.TextSpan): - The sentence text. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeSentiment][] or if - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment] - is set to true, this field will contain the sentiment for - the sentence. 
- """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=2, - message='Sentiment', - ) - - -class Entity(proto.Message): - r"""Represents a phrase in the text that is a known entity, such - as a person, an organization, or location. The API associates - information, such as salience and mentions, with entities. - - Attributes: - name (str): - The representative name for the entity. - type_ (google.cloud.language_v1.types.Entity.Type): - The entity type. - metadata (MutableMapping[str, str]): - Metadata associated with the entity. - - For most entity types, the metadata is a Wikipedia URL - (``wikipedia_url``) and Knowledge Graph MID (``mid``), if - they are available. For the metadata associated with other - entity types, see the Type table below. - salience (float): - The salience score associated with the entity in the [0, - 1.0] range. - - The salience score for an entity provides information about - the importance or centrality of that entity to the entire - document text. Scores closer to 0 are less salient, while - scores closer to 1.0 are highly salient. - mentions (MutableSequence[google.cloud.language_v1.types.EntityMention]): - The mentions of this entity in the input - document. The API currently supports proper noun - mentions. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the aggregate - sentiment expressed for this entity in the provided - document. - """ - class Type(proto.Enum): - r"""The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph - MID (``mid``). The table below lists the associated fields for - entities that have different metadata. 
-
-        Values:
-            UNKNOWN (0):
-                Unknown
-            PERSON (1):
-                Person
-            LOCATION (2):
-                Location
-            ORGANIZATION (3):
-                Organization
-            EVENT (4):
-                Event
-            WORK_OF_ART (5):
-                Artwork
-            CONSUMER_GOOD (6):
-                Consumer product
-            OTHER (7):
-                Other types of entities
-            PHONE_NUMBER (9):
-                Phone number
-
-                The metadata lists the phone number, formatted according to
-                local convention, plus whichever additional elements appear
-                in the text:
-
-                -  ``number`` - the actual number, broken down into sections
-                   as per local convention
-                -  ``national_prefix`` - country code, if detected
-                -  ``area_code`` - region or area code, if detected
-                -  ``extension`` - phone extension (to be dialed after
-                   connection), if detected
-            ADDRESS (10):
-                Address
-
-                The metadata identifies the street number and locality plus
-                whichever additional elements appear in the text:
-
-                -  ``street_number`` - street number
-                -  ``locality`` - city or town
-                -  ``street_name`` - street/route name, if detected
-                -  ``postal_code`` - postal code, if detected
-                -  ``country`` - country, if detected
-                -  ``broad_region`` - administrative area, such as the
-                   state, if detected
-                -  ``narrow_region`` - smaller administrative area, such as
-                   county, if detected
-                -  ``sublocality`` - used in Asian addresses to demark a
-                   district within a city, if detected
-            DATE (11):
-                Date
-
-                The metadata identifies the components of the date:
-
-                -  ``year`` - four digit year, if detected
-                -  ``month`` - two digit month number, if detected
-                -  ``day`` - two digit day number, if detected
-            NUMBER (12):
-                Number
-                The metadata is the number itself.
-            PRICE (13):
-                Price
-
-                The metadata identifies the ``value`` and ``currency``.
-        """
-        UNKNOWN = 0
-        PERSON = 1
-        LOCATION = 2
-        ORGANIZATION = 3
-        EVENT = 4
-        WORK_OF_ART = 5
-        CONSUMER_GOOD = 6
-        OTHER = 7
-        PHONE_NUMBER = 9
-        ADDRESS = 10
-        DATE = 11
-        NUMBER = 12
-        PRICE = 13
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    type_: Type = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=Type,
-    )
-    metadata: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-    salience: float = proto.Field(
-        proto.FLOAT,
-        number=4,
-    )
-    mentions: MutableSequence['EntityMention'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=5,
-        message='EntityMention',
-    )
-    sentiment: 'Sentiment' = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message='Sentiment',
-    )
-
-
-class Token(proto.Message):
-    r"""Represents the smallest syntactic building block of the text.
-
-    Attributes:
-        text (google.cloud.language_v1.types.TextSpan):
-            The token text.
-        part_of_speech (google.cloud.language_v1.types.PartOfSpeech):
-            Part-of-speech tag for this token.
-        dependency_edge (google.cloud.language_v1.types.DependencyEdge):
-            Dependency tree parse for this token.
-        lemma (str):
-            `Lemma <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__
-            of the token.
-    """
-
-    text: 'TextSpan' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='TextSpan',
-    )
-    part_of_speech: 'PartOfSpeech' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='PartOfSpeech',
-    )
-    dependency_edge: 'DependencyEdge' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='DependencyEdge',
-    )
-    lemma: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class Sentiment(proto.Message):
-    r"""Represents the feeling associated with the entire text or
-    entities in the text.
-
-    Attributes:
-        magnitude (float):
-            A non-negative number in the [0, +inf) range, which
-            represents the absolute magnitude of sentiment regardless of
-            score (positive or negative).
- score (float): - Sentiment score between -1.0 (negative - sentiment) and 1.0 (positive sentiment). - """ - - magnitude: float = proto.Field( - proto.FLOAT, - number=2, - ) - score: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class PartOfSpeech(proto.Message): - r"""Represents part of speech information for a token. Parts of speech - are as defined in - http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf - - Attributes: - tag (google.cloud.language_v1.types.PartOfSpeech.Tag): - The part of speech tag. - aspect (google.cloud.language_v1.types.PartOfSpeech.Aspect): - The grammatical aspect. - case (google.cloud.language_v1.types.PartOfSpeech.Case): - The grammatical case. - form (google.cloud.language_v1.types.PartOfSpeech.Form): - The grammatical form. - gender (google.cloud.language_v1.types.PartOfSpeech.Gender): - The grammatical gender. - mood (google.cloud.language_v1.types.PartOfSpeech.Mood): - The grammatical mood. - number (google.cloud.language_v1.types.PartOfSpeech.Number): - The grammatical number. - person (google.cloud.language_v1.types.PartOfSpeech.Person): - The grammatical person. - proper (google.cloud.language_v1.types.PartOfSpeech.Proper): - The grammatical properness. - reciprocity (google.cloud.language_v1.types.PartOfSpeech.Reciprocity): - The grammatical reciprocity. - tense (google.cloud.language_v1.types.PartOfSpeech.Tense): - The grammatical tense. - voice (google.cloud.language_v1.types.PartOfSpeech.Voice): - The grammatical voice. - """ - class Tag(proto.Enum): - r"""The part of speech tags enum. - - Values: - UNKNOWN (0): - Unknown - ADJ (1): - Adjective - ADP (2): - Adposition (preposition and postposition) - ADV (3): - Adverb - CONJ (4): - Conjunction - DET (5): - Determiner - NOUN (6): - Noun (common and proper) - NUM (7): - Cardinal number - PRON (8): - Pronoun - PRT (9): - Particle or other function word - PUNCT (10): - Punctuation - VERB (11): - Verb (all tenses and modes) - X (12): - Other: foreign words, typos, abbreviations - AFFIX (13): - Affix - """ - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Aspect(proto.Enum): - r"""The characteristic of a verb that expresses time flow during - an event. - - Values: - ASPECT_UNKNOWN (0): - Aspect is not applicable in the analyzed - language or is not predicted. - PERFECTIVE (1): - Perfective - IMPERFECTIVE (2): - Imperfective - PROGRESSIVE (3): - Progressive - """ - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(proto.Enum): - r"""The grammatical function performed by a noun or pronoun in a - phrase, clause, or sentence. In some languages, other parts of - speech, such as adjective and determiner, take case inflection - in agreement with the noun. - - Values: - CASE_UNKNOWN (0): - Case is not applicable in the analyzed - language or is not predicted. 
- ACCUSATIVE (1): - Accusative - ADVERBIAL (2): - Adverbial - COMPLEMENTIVE (3): - Complementive - DATIVE (4): - Dative - GENITIVE (5): - Genitive - INSTRUMENTAL (6): - Instrumental - LOCATIVE (7): - Locative - NOMINATIVE (8): - Nominative - OBLIQUE (9): - Oblique - PARTITIVE (10): - Partitive - PREPOSITIONAL (11): - Prepositional - REFLEXIVE_CASE (12): - Reflexive - RELATIVE_CASE (13): - Relative - VOCATIVE (14): - Vocative - """ - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(proto.Enum): - r"""Depending on the language, Form can be categorizing different - forms of verbs, adjectives, adverbs, etc. For example, - categorizing inflected endings of verbs and adjectives or - distinguishing between short and long forms of adjectives and - participles - - Values: - FORM_UNKNOWN (0): - Form is not applicable in the analyzed - language or is not predicted. - ADNOMIAL (1): - Adnomial - AUXILIARY (2): - Auxiliary - COMPLEMENTIZER (3): - Complementizer - FINAL_ENDING (4): - Final ending - GERUND (5): - Gerund - REALIS (6): - Realis - IRREALIS (7): - Irrealis - SHORT (8): - Short form - LONG (9): - Long form - ORDER (10): - Order form - SPECIFIC (11): - Specific form - """ - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(proto.Enum): - r"""Gender classes of nouns reflected in the behaviour of - associated words. - - Values: - GENDER_UNKNOWN (0): - Gender is not applicable in the analyzed - language or is not predicted. - FEMININE (1): - Feminine - MASCULINE (2): - Masculine - NEUTER (3): - Neuter - """ - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(proto.Enum): - r"""The grammatical feature of verbs, used for showing modality - and attitude. - - Values: - MOOD_UNKNOWN (0): - Mood is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_MOOD (1): - Conditional - IMPERATIVE (2): - Imperative - INDICATIVE (3): - Indicative - INTERROGATIVE (4): - Interrogative - JUSSIVE (5): - Jussive - SUBJUNCTIVE (6): - Subjunctive - """ - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(proto.Enum): - r"""Count distinctions. - - Values: - NUMBER_UNKNOWN (0): - Number is not applicable in the analyzed - language or is not predicted. - SINGULAR (1): - Singular - PLURAL (2): - Plural - DUAL (3): - Dual - """ - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(proto.Enum): - r"""The distinction between the speaker, second person, third - person, etc. - - Values: - PERSON_UNKNOWN (0): - Person is not applicable in the analyzed - language or is not predicted. - FIRST (1): - First - SECOND (2): - Second - THIRD (3): - Third - REFLEXIVE_PERSON (4): - Reflexive - """ - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(proto.Enum): - r"""This category shows if the token is part of a proper name. - - Values: - PROPER_UNKNOWN (0): - Proper is not applicable in the analyzed - language or is not predicted. 
- PROPER (1): - Proper - NOT_PROPER (2): - Not proper - """ - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(proto.Enum): - r"""Reciprocal features of a pronoun. - - Values: - RECIPROCITY_UNKNOWN (0): - Reciprocity is not applicable in the analyzed - language or is not predicted. - RECIPROCAL (1): - Reciprocal - NON_RECIPROCAL (2): - Non-reciprocal - """ - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tense(proto.Enum): - r"""Time reference. - - Values: - TENSE_UNKNOWN (0): - Tense is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_TENSE (1): - Conditional - FUTURE (2): - Future - PAST (3): - Past - PRESENT (4): - Present - IMPERFECT (5): - Imperfect - PLUPERFECT (6): - Pluperfect - """ - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(proto.Enum): - r"""The relationship between the action that a verb expresses and - the participants identified by its arguments. - - Values: - VOICE_UNKNOWN (0): - Voice is not applicable in the analyzed - language or is not predicted. - ACTIVE (1): - Active - CAUSATIVE (2): - Causative - PASSIVE (3): - Passive - """ - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 - - tag: Tag = proto.Field( - proto.ENUM, - number=1, - enum=Tag, - ) - aspect: Aspect = proto.Field( - proto.ENUM, - number=2, - enum=Aspect, - ) - case: Case = proto.Field( - proto.ENUM, - number=3, - enum=Case, - ) - form: Form = proto.Field( - proto.ENUM, - number=4, - enum=Form, - ) - gender: Gender = proto.Field( - proto.ENUM, - number=5, - enum=Gender, - ) - mood: Mood = proto.Field( - proto.ENUM, - number=6, - enum=Mood, - ) - number: Number = proto.Field( - proto.ENUM, - number=7, - enum=Number, - ) - person: Person = proto.Field( - proto.ENUM, - number=8, - enum=Person, - ) - proper: Proper = proto.Field( - proto.ENUM, - number=9, - enum=Proper, - ) - reciprocity: Reciprocity = proto.Field( - proto.ENUM, - number=10, - enum=Reciprocity, - ) - tense: Tense = proto.Field( - proto.ENUM, - number=11, - enum=Tense, - ) - voice: Voice = proto.Field( - proto.ENUM, - number=12, - enum=Voice, - ) - - -class DependencyEdge(proto.Message): - r"""Represents dependency parse tree information for a token. - (For more information on dependency labels, see - http://www.aclweb.org/anthology/P13-2017 - - Attributes: - head_token_index (int): - Represents the head of this token in the dependency tree. - This is the index of the token which has an arc going to - this token. The index is the position of the token in the - array of tokens returned by the API method. If this token is - a root token, then the ``head_token_index`` is its own - index. - label (google.cloud.language_v1.types.DependencyEdge.Label): - The parse label for the token. - """ - class Label(proto.Enum): - r"""The parse label enum for the token. 
- - Values: - UNKNOWN (0): - Unknown - ABBREV (1): - Abbreviation modifier - ACOMP (2): - Adjectival complement - ADVCL (3): - Adverbial clause modifier - ADVMOD (4): - Adverbial modifier - AMOD (5): - Adjectival modifier of an NP - APPOS (6): - Appositional modifier of an NP - ATTR (7): - Attribute dependent of a copular verb - AUX (8): - Auxiliary (non-main) verb - AUXPASS (9): - Passive auxiliary - CC (10): - Coordinating conjunction - CCOMP (11): - Clausal complement of a verb or adjective - CONJ (12): - Conjunct - CSUBJ (13): - Clausal subject - CSUBJPASS (14): - Clausal passive subject - DEP (15): - Dependency (unable to determine) - DET (16): - Determiner - DISCOURSE (17): - Discourse - DOBJ (18): - Direct object - EXPL (19): - Expletive - GOESWITH (20): - Goes with (part of a word in a text not well - edited) - IOBJ (21): - Indirect object - MARK (22): - Marker (word introducing a subordinate - clause) - MWE (23): - Multi-word expression - MWV (24): - Multi-word verbal expression - NEG (25): - Negation modifier - NN (26): - Noun compound modifier - NPADVMOD (27): - Noun phrase used as an adverbial modifier - NSUBJ (28): - Nominal subject - NSUBJPASS (29): - Passive nominal subject - NUM (30): - Numeric modifier of a noun - NUMBER (31): - Element of compound number - P (32): - Punctuation mark - PARATAXIS (33): - Parataxis relation - PARTMOD (34): - Participial modifier - PCOMP (35): - The complement of a preposition is a clause - POBJ (36): - Object of a preposition - POSS (37): - Possession modifier - POSTNEG (38): - Postverbal negative particle - PRECOMP (39): - Predicate complement - PRECONJ (40): - Preconjunt - PREDET (41): - Predeterminer - PREF (42): - Prefix - PREP (43): - Prepositional modifier - PRONL (44): - The relationship between a verb and verbal - morpheme - PRT (45): - Particle - PS (46): - Associative or possessive marker - QUANTMOD (47): - Quantifier phrase modifier - RCMOD (48): - Relative clause modifier - RCMODREL (49): - Complementizer in relative clause - RDROP (50): - Ellipsis without a preceding predicate - REF (51): - Referent - REMNANT (52): - Remnant - REPARANDUM (53): - Reparandum - ROOT (54): - Root - SNUM (55): - Suffix specifying a unit of number - SUFF (56): - Suffix - TMOD (57): - Temporal modifier - TOPIC (58): - Topic marker - VMOD (59): - Clause headed by an infinite form of the verb - that modifies a noun - VOCATIVE (60): - Vocative - XCOMP (61): - Open clausal complement - SUFFIX (62): - Name suffix - TITLE (63): - Name title - ADVPHMOD (64): - Adverbial phrase modifier - AUXCAUS (65): - Causative auxiliary - AUXVV (66): - Helper auxiliary - DTMOD (67): - Rentaishi (Prenominal modifier) - FOREIGN (68): - Foreign words - KW (69): - Keyword - LIST (70): - List for chains of comparable items - NOMC (71): - Nominalized clause - NOMCSUBJ (72): - Nominalized clausal subject - NOMCSUBJPASS (73): - Nominalized clausal passive - NUMC (74): - Compound of numeric modifier - COP (75): - Copula - DISLOCATED (76): - Dislocated relation (for fronted/topicalized - elements) - ASP (77): - Aspect marker - GMOD (78): - Genitive modifier - GOBJ (79): - Genitive object - INFMOD (80): - Infinitival modifier - MES (81): - Measure - NCOMP (82): - Nominal complement of a noun - """ - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - 
MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - head_token_index: int = proto.Field( - proto.INT32, - number=1, - ) - label: Label = proto.Field( - proto.ENUM, - number=2, - enum=Label, - ) - - -class EntityMention(proto.Message): - r"""Represents a mention for an entity in the text. Currently, - proper noun mentions are supported. - - Attributes: - text (google.cloud.language_v1.types.TextSpan): - The mention text. - type_ (google.cloud.language_v1.types.EntityMention.Type): - The type of the entity mention. - sentiment (google.cloud.language_v1.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the sentiment - expressed for this mention of the entity in the provided - document. - """ - class Type(proto.Enum): - r"""The supported types of mentions. - - Values: - TYPE_UNKNOWN (0): - Unknown - PROPER (1): - Proper name - COMMON (2): - Common noun (or noun compound) - """ - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=3, - message='Sentiment', - ) - - -class TextSpan(proto.Message): - r"""Represents an output piece of text. - - Attributes: - content (str): - The content of the output text. - begin_offset (int): - The API calculates the beginning offset of the content in - the original document according to the - [EncodingType][google.cloud.language.v1.EncodingType] - specified in the API request. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - begin_offset: int = proto.Field( - proto.INT32, - number=2, - ) - - -class ClassificationCategory(proto.Message): - r"""Represents a category returned from the text classifier. - - Attributes: - name (str): - The name of the category representing the - document. - confidence (float): - The classifier's confidence of the category. - Number represents how certain the classifier is - that this category represents the given text. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - confidence: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class ClassificationModelOptions(proto.Message): - r"""Model options available for classification requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - v1_model (google.cloud.language_v1.types.ClassificationModelOptions.V1Model): - Setting this field will use the V1 model and - V1 content categories version. The V1 model is a - legacy model; support for this will be - discontinued in the future. - - This field is a member of `oneof`_ ``model_type``. - v2_model (google.cloud.language_v1.types.ClassificationModelOptions.V2Model): - Setting this field will use the V2 model with - the appropriate content categories version. The - V2 model is a better performing model. - - This field is a member of `oneof`_ ``model_type``. - """ - - class V1Model(proto.Message): - r"""Options for the V1 model. - """ - - class V2Model(proto.Message): - r"""Options for the V2 model. - - Attributes: - content_categories_version (google.cloud.language_v1.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion): - The content categories used for - classification. - """ - class ContentCategoriesVersion(proto.Enum): - r"""The content categories used for classification. - - Values: - CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0): - If ``ContentCategoriesVersion`` is not specified, this - option will default to ``V1``. - V1 (1): - Legacy content categories of our initial - launch in 2017. - V2 (2): - Updated content categories in 2022. - """ - CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0 - V1 = 1 - V2 = 2 - - content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field( - proto.ENUM, - number=1, - enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion', - ) - - v1_model: V1Model = proto.Field( - proto.MESSAGE, - number=1, - oneof='model_type', - message=V1Model, - ) - v2_model: V2Model = proto.Field( - proto.MESSAGE, - number=2, - oneof='model_type', - message=V2Model, - ) - - -class AnalyzeSentimentRequest(proto.Message): - r"""The sentiment analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSentimentResponse(proto.Message): - r"""The sentiment analysis response message. - - Attributes: - document_sentiment (google.cloud.language_v1.types.Sentiment): - The overall sentiment of the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - The sentiment for all the sentences in the - document. - """ - - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=1, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Sentence', - ) - - -class AnalyzeEntitySentimentRequest(proto.Message): - r"""The entity-level sentiment analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. 
Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitySentimentResponse(proto.Message): - r"""The entity-level sentiment analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - The recognized entities in the input document - with associated sentiments. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeEntitiesRequest(proto.Message): - r"""The entity analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeEntitiesResponse(proto.Message): - r"""The entity analysis response message. - - Attributes: - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - The recognized entities in the input - document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - """ - - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entity', - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AnalyzeSyntaxRequest(proto.Message): - r"""The syntax analysis request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=2, - enum='EncodingType', - ) - - -class AnalyzeSyntaxResponse(proto.Message): - r"""The syntax analysis response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - Sentences in the input document. - tokens (MutableSequence[google.cloud.language_v1.types.Token]): - Tokens, along with their syntactic - information, in the input document. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. 
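A hedged sketch of consuming ``AnalyzeSyntaxResponse`` (the client call and sample sentence are illustrative assumptions, not part of the generated file). Note how ``DependencyEdge.head_token_index`` indexes back into the same ``tokens`` list, with a root token pointing at itself:

```python
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()
document = language_v1.Document(
    content="The quick brown fox jumps over the lazy dog.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)
response = client.analyze_syntax(
    request={"document": document, "encoding_type": language_v1.EncodingType.UTF8}
)
for token in response.tokens:
    edge = token.dependency_edge
    # head_token_index is a position in response.tokens.
    head = response.tokens[edge.head_token_index].text.content
    # tag and label are the PartOfSpeech.Tag and DependencyEdge.Label enums.
    print(token.text.content, token.part_of_speech.tag.name,
          edge.label.name, "->", head)
```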
- """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - language: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ClassifyTextRequest(proto.Message): - r"""The document classification request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): - Model options to use for classification. - Defaults to v1 options if not specified. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='ClassificationModelOptions', - ) - - -class ClassifyTextResponse(proto.Message): - r"""The document classification response message. - - Attributes: - categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Categories representing the input document. - """ - - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClassificationCategory', - ) - - -class ModerateTextRequest(proto.Message): - r"""The document moderation request message. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - """ - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - - -class ModerateTextResponse(proto.Message): - r"""The document moderation response message. - - Attributes: - moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Harmful and sensitive categories representing - the input document. - """ - - moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClassificationCategory', - ) - - -class AnnotateTextRequest(proto.Message): - r"""The request message for the text annotation API, which can - perform multiple analysis types (sentiment, entities, and - syntax) in one call. - - Attributes: - document (google.cloud.language_v1.types.Document): - Required. Input document. - features (google.cloud.language_v1.types.AnnotateTextRequest.Features): - Required. The enabled features. - encoding_type (google.cloud.language_v1.types.EncodingType): - The encoding type used by the API to - calculate offsets. - """ - - class Features(proto.Message): - r"""All available features for sentiment, syntax, and semantic - analysis. Setting each one to true will enable that specific - analysis for the input. - - Attributes: - extract_syntax (bool): - Extract syntax information. - extract_entities (bool): - Extract entities. - extract_document_sentiment (bool): - Extract document-level sentiment. - extract_entity_sentiment (bool): - Extract entities and their associated - sentiment. - classify_text (bool): - Classify the full document into categories. - moderate_text (bool): - Moderate the document for harmful and - sensitive categories. - classification_model_options (google.cloud.language_v1.types.ClassificationModelOptions): - The model options to use for classification. Defaults to v1 - options if not specified. Only used if ``classify_text`` is - set to true. 
- """ - - extract_syntax: bool = proto.Field( - proto.BOOL, - number=1, - ) - extract_entities: bool = proto.Field( - proto.BOOL, - number=2, - ) - extract_document_sentiment: bool = proto.Field( - proto.BOOL, - number=3, - ) - extract_entity_sentiment: bool = proto.Field( - proto.BOOL, - number=4, - ) - classify_text: bool = proto.Field( - proto.BOOL, - number=6, - ) - moderate_text: bool = proto.Field( - proto.BOOL, - number=11, - ) - classification_model_options: 'ClassificationModelOptions' = proto.Field( - proto.MESSAGE, - number=10, - message='ClassificationModelOptions', - ) - - document: 'Document' = proto.Field( - proto.MESSAGE, - number=1, - message='Document', - ) - features: Features = proto.Field( - proto.MESSAGE, - number=2, - message=Features, - ) - encoding_type: 'EncodingType' = proto.Field( - proto.ENUM, - number=3, - enum='EncodingType', - ) - - -class AnnotateTextResponse(proto.Message): - r"""The text annotations response message. - - Attributes: - sentences (MutableSequence[google.cloud.language_v1.types.Sentence]): - Sentences in the input document. Populated if the user - enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. - tokens (MutableSequence[google.cloud.language_v1.types.Token]): - Tokens, along with their syntactic information, in the input - document. Populated if the user enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. - entities (MutableSequence[google.cloud.language_v1.types.Entity]): - Entities, along with their semantic information, in the - input document. Populated if the user enables - [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. - document_sentiment (google.cloud.language_v1.types.Sentiment): - The overall sentiment for the document. Populated if the - user enables - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. - language (str): - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See - [Document.language][google.cloud.language.v1.Document.language] - field for more details. - categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Categories identified in the input document. - moderation_categories (MutableSequence[google.cloud.language_v1.types.ClassificationCategory]): - Harmful and sensitive categories identified - in the input document. 
- """ - - sentences: MutableSequence['Sentence'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Sentence', - ) - tokens: MutableSequence['Token'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Token', - ) - entities: MutableSequence['Entity'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Entity', - ) - document_sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=4, - message='Sentiment', - ) - language: str = proto.Field( - proto.STRING, - number=5, - ) - categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ClassificationCategory', - ) - moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ClassificationCategory', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index b104aa2e..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/language_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py deleted file mode 100644 index 71f2d049..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntities_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py deleted file mode 100644 index 14beb557..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entities_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_entities(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntities_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py deleted file mode 100644 index a8a1b59b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py deleted file mode 100644 index c6d27ac8..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_entity_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeEntitySentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py deleted file mode 100644 index 6b65f274..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSentiment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSentiment_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py deleted file mode 100644 index c9a48df7..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_sentiment_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSentiment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSentiment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_sentiment(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_sentiment(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSentiment_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py deleted file mode 100644 index 31640e52..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSyntax_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = await client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSyntax_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py deleted file mode 100644 index 947613db..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeSyntax -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnalyzeSyntax_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_analyze_syntax(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnalyzeSyntax_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py deleted file mode 100644 index 02a54aee..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnnotateText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = await client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnnotateText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py deleted file mode 100644 index 9d90a0f0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_annotate_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnnotateText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_AnnotateText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_annotate_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_AnnotateText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py deleted file mode 100644 index a6497c09..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_ClassifyText_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -async def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = await client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_ClassifyText_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py deleted file mode 100644 index e1d32646..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_classify_text_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ClassifyText -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-language - - -# [START language_v1_generated_LanguageService_ClassifyText_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1 - - -def sample_classify_text(): - # Create a client - client = language_v1.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1.Document() - document.content = "content_value" - - request = language_v1.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - -# [END language_v1_generated_LanguageService_ClassifyText_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json deleted file mode 100644 index 4e481f59..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json +++ /dev/null @@ -1,1190 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.language.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-language", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1_generated_language_service_analyze_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": 
"LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1_generated_language_service_analyze_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntities_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1_generated_language_service_analyze_entity_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entity_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": 
"google.cloud.language_v1.LanguageServiceClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeEntitySentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_entity_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1_generated_language_service_analyze_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"language_v1_generated_language_service_analyze_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1_generated_language_service_analyze_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1_generated_language_service_analyze_syntax_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_syntax_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1_generated_language_service_analyze_syntax_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnalyzeSyntax_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_analyze_syntax_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1_generated_language_service_annotate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnnotateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_annotate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1_generated_language_service_annotate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_AnnotateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_annotate_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1_generated_language_service_classify_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ClassifyText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": 
"FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_classify_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1_generated_language_service_classify_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ClassifyText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_classify_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ModerateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1_generated_language_service_moderate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ModerateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_moderate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1.LanguageServiceClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1.types.ModerateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1_generated_language_service_moderate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1_generated_LanguageService_ModerateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1_generated_language_service_moderate_text_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py deleted file mode 100644 index 10fa218c..00000000 --- a/owl-bot-staging/v1/scripts/fixup_language_v1_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import argparse
-import os
-import libcst as cst
-import pathlib
-import sys
-from typing import (Any, Callable, Dict, List, Sequence, Tuple)
-
-
-def partition(
-    predicate: Callable[[Any], bool],
-    iterator: Sequence[Any]
-) -> Tuple[List[Any], List[Any]]:
-    """A stable, out-of-place partition."""
-    results = ([], [])
-
-    for i in iterator:
-        results[int(predicate(i))].append(i)
-
-    # Returns trueList, falseList
-    return results[1], results[0]
-
-
-class languageCallTransformer(cst.CSTTransformer):
-    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
-    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'analyze_entities': ('document', 'encoding_type', ),
-        'analyze_entity_sentiment': ('document', 'encoding_type', ),
-        'analyze_sentiment': ('document', 'encoding_type', ),
-        'analyze_syntax': ('document', 'encoding_type', ),
-        'annotate_text': ('document', 'features', 'encoding_type', ),
-        'classify_text': ('document', 'classification_model_options', ),
-        'moderate_text': ('document', ),
-    }
-
-    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
-        try:
-            key = original.func.attr.value
-            kword_params = self.METHOD_TO_PARAMS[key]
-        except (AttributeError, KeyError):
-            # Either not a method from the API or too convoluted to be sure.
-            return updated
-
-        # If the existing code is valid, keyword args come after positional args.
-        # Therefore, all positional args must map to the first parameters.
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=languageCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the language client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""")
-    parser.add_argument(
-        '-d',
-        '--input-directory',
-        required=True,
-        dest='input_dir',
-        help='the input directory to walk for python files to fix up',
-    )
-    parser.add_argument(
-        '-o',
-        '--output-directory',
-        required=True,
-        dest='output_dir',
-        help='the directory to output files fixed via un-flattening',
-    )
-    args = parser.parse_args()
-    input_dir = pathlib.Path(args.input_dir)
-    output_dir = pathlib.Path(args.output_dir)
-    if not input_dir.is_dir():
-        print(
-            f"input directory '{input_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if not output_dir.is_dir():
-        print(
-            f"output directory '{output_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if os.listdir(output_dir):
-        print(
-            f"output directory '{output_dir}' is not empty",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    fix_files(input_dir, output_dir)
diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py
deleted file mode 100644
index 047e5bce..00000000
--- a/owl-bot-staging/v1/setup.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import io
-import os
-
-import setuptools # type: ignore
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-name = 'google-cloud-language'
-
-
-description = "Google Cloud Language API client library"
-
-version = {}
-with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp:
-    exec(fp.read(), version)
-version = version["__version__"]
-
-if version[0] == "0":
-    release_status = "Development Status :: 4 - Beta"
-else:
-    release_status = "Development Status :: 5 - Production/Stable"
-
-dependencies = [
-    "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
-    "proto-plus >= 1.22.0, <2.0.0dev",
-    "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
-    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
-]
-url = "https://github.com/googleapis/python-language"
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
-    readme = readme_file.read()
-
-packages = [
-    package
-    for package in setuptools.PEP420PackageFinder.find()
-    if package.startswith("google")
-]
-
-namespaces = ["google", "google.cloud"]
-
-setuptools.setup(
-    name=name,
-    version=version,
-    description=description,
-    long_description=readme,
-    author="Google LLC",
-    author_email="googleapis-packages@google.com",
-    license="Apache 2.0",
-    url=url,
-    classifiers=[
-        release_status,
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: Apache Software License",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Operating System :: OS Independent",
-        "Topic :: Internet",
-    ],
-    platforms="Posix; MacOS X; Windows",
-    packages=packages,
-    python_requires=">=3.7",
-    namespace_packages=namespaces,
-    install_requires=dependencies,
-    include_package_data=True,
-    zip_safe=False,
-)
diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt
deleted file mode 100644
index ed7f9aed..00000000
--- a/owl-bot-staging/v1/testing/constraints-3.10.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt
deleted file mode 100644
index ed7f9aed..00000000
--- a/owl-bot-staging/v1/testing/constraints-3.11.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt
deleted file mode 100644
index ed7f9aed..00000000
--- a/owl-bot-staging/v1/testing/constraints-3.12.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/language_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py b/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py deleted file mode 100644 index 1a75b217..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/language_v1/test_language_service.py +++ /dev/null @@ -1,4070 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
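One note before the deleted test module that follows: its first test pins down how the default mTLS endpoint is derived from the regular endpoint. A self-contained sketch of that mapping, using the same private helper the test calls (private API, so subject to change):

    from google.cloud.language_v1 import LanguageServiceClient

    # "<name>.googleapis.com" maps to "<name>.mtls.googleapis.com",
    # sandbox hosts gain ".mtls" the same way, and non-Google hosts
    # pass through unchanged.
    assert (
        LanguageServiceClient._get_default_mtls_endpoint("example.googleapis.com")
        == "example.mtls.googleapis.com"
    )
    assert (
        LanguageServiceClient._get_default_mtls_endpoint("api.example.com")
        == "api.example.com"
    )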
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.language_v1.services.language_service import LanguageServiceAsyncClient -from google.cloud.language_v1.services.language_service import LanguageServiceClient -from google.cloud.language_v1.services.language_service import transports -from google.cloud.language_v1.types import language_service -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LanguageServiceClient._get_default_mtls_endpoint(None) is None - assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - 
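The construction test above also fixes the transport-to-host mapping: the gRPC transports dial language.googleapis.com:443, while the REST transport targets https://language.googleapis.com. A short sketch of the public entry point it exercises (the key-file path is a placeholder):

    from google.cloud import language_v1

    # Build a client from a service-account key file, forcing the REST
    # transport; from_service_account_json is an alias for this method.
    client = language_v1.LanguageServiceClient.from_service_account_file(
        "key.json", transport="rest"
    )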
-@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LanguageServiceGrpcTransport, "grpc"), - (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - -def test_language_service_client_get_transport_class(): - transport = LanguageServiceClient.get_transport_class() - available_transports = [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceRestTransport, - ] - assert transport in available_transports - - transport = LanguageServiceClient.get_transport_class("grpc") - assert transport == transports.LanguageServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError):
-            client = client_class(transport=transport_name)
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"),
-])
-@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient))
-@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - LanguageServiceClient, LanguageServiceAsyncClient -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), -]) -def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_language_service_client_client_options_from_dict(): - with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = LanguageServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "language.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-language',
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="language.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeSentimentRequest,
-  dict,
-])
-def test_analyze_sentiment(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        )
-        response = client.analyze_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSentimentResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_sentiment_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        client.analyze_sentiment()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSentimentResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async_from_dict():
-    await test_analyze_sentiment_async(request_type=dict)
-
-
-def test_analyze_sentiment_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSentimentResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_sentiment_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_sentiment(
-            language_service.AnalyzeSentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_sentiment_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - response = client.analyze_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - client.analyze_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - -@pytest.mark.asyncio -async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeEntitiesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entities_async_from_dict():
-    await test_analyze_entities_async(request_type=dict)
-
-
-def test_analyze_entities_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeEntitiesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_entities(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_entities_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entities(
-            language_service.AnalyzeEntitiesRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_entities_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_entities(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_entities_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - response = client.analyze_entity_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - client.analyze_entity_sentiment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_entity_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_async_from_dict():
-    await test_analyze_entity_sentiment_async(request_type=dict)
-
-
-def test_analyze_entity_sentiment_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entity_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeEntitySentimentResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_entity_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_entity_sentiment_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entity_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_entity_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeSyntaxRequest,
-  dict,
-])
-def test_analyze_syntax(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        )
-        response = client.analyze_syntax(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_syntax_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        client.analyze_syntax()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_syntax(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async_from_dict():
-    await test_analyze_syntax_async(request_type=dict)
-
-
-def test_analyze_syntax_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSyntaxResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_syntax(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_syntax_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_syntax(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.ClassifyTextRequest,
-  dict,
-])
-def test_classify_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ClassifyTextResponse(
-        )
-        response = client.classify_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-def test_classify_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        client.classify_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-@pytest.mark.asyncio
-async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse(
-        ))
-        response = await client.classify_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_classify_text_async_from_dict():
-    await test_classify_text_async(request_type=dict)
-
-
-def test_classify_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ClassifyTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.classify_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-
-def test_classify_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.classify_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.ModerateTextRequest,
-  dict,
-])
-def test_moderate_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ModerateTextResponse(
-        )
-        response = client.moderate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-def test_moderate_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        client.moderate_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-@pytest.mark.asyncio
-async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse(
-        ))
-        response = await client.moderate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_moderate_text_async_from_dict():
-    await test_moderate_text_async(request_type=dict)
-
-
-def test_moderate_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ModerateTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.moderate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-
-def test_moderate_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.moderate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnnotateTextRequest,
-  dict,
-])
-def test_annotate_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnnotateTextResponse(
-            language='language_value',
-        )
-        response = client.annotate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-def test_annotate_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        client.annotate_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-@pytest.mark.asyncio
-async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
-            language='language_value',
-        ))
-        response = await client.annotate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_annotate_text_async_from_dict():
-    await test_annotate_text_async(request_type=dict)
-
-
-def test_annotate_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnnotateTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.annotate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].features
-        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_annotate_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.annotate_text(
-            language_service.AnnotateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.annotate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].features
-        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.annotate_text(
-            language_service.AnnotateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeSentimentRequest,
-  dict,
-])
-def test_analyze_sentiment_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_sentiment(request)
-
-    # Establish that the response is the type that we expect.
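-    # The mocked JSON body is decoded back through the proto wrapper, so
-    # the 'language' value set above should round-trip onto the response.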
-    assert isinstance(response, language_service.AnalyzeSentimentResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeSentimentResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
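-            # (For reference, a real transcode result for this RPC would
-            # resemble {'uri': '/v1/documents:analyzeSentiment', 'method': 'post', ...};
-            # a placeholder uri is sufficient under the mock.)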
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_sentiment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_sentiment_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_sentiment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_sentiment_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse())
-
-        request = language_service.AnalyzeSentimentRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeSentimentResponse()
-
-        client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
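-    # A 400 status on the underlying HTTP session should surface to the
-    # caller as core_exceptions.BadRequest.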
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_sentiment(request)
-
-
-def test_analyze_sentiment_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSentimentResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_sentiment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:analyzeSentiment" % client.transport._host, args[1])
-
-
-def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_sentiment(
-            language_service.AnalyzeSentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_sentiment_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeEntitiesRequest,
-  dict,
-])
-def test_analyze_entities_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitiesResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_entities(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeEntitiesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entities(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entities_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_entities._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entities_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse())
-
-        request = language_service.AnalyzeEntitiesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitiesResponse()
-
-        client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_entities(request)
-
-
-def test_analyze_entities_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitiesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_entities(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:analyzeEntities" % client.transport._host, args[1])
-
-
-def test_analyze_entities_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entities(
-            language_service.AnalyzeEntitiesRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_entities_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeEntitySentimentRequest,
-  dict,
-])
-def test_analyze_entity_sentiment_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitySentimentResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_entity_sentiment(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeEntitySentimentResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_entity_sentiment(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_entity_sentiment_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_entity_sentiment_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse())
-
-        request = language_service.AnalyzeEntitySentimentRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeEntitySentimentResponse()
-
-        client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_entity_sentiment(request)
-
-
-def test_analyze_entity_sentiment_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeEntitySentimentResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_entity_sentiment(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:analyzeEntitySentiment" % client.transport._host, args[1])
-
-
-def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_entity_sentiment_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnalyzeSyntaxRequest,
-  dict,
-])
-def test_analyze_syntax_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_syntax(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnalyzeSyntaxResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.analyze_syntax(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_analyze_syntax_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.analyze_syntax._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_syntax_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse())
-
-        request = language_service.AnalyzeSyntaxRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnalyzeSyntaxResponse()
-
-        client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.analyze_syntax(request)
-
-
-def test_analyze_syntax_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSyntaxResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.analyze_syntax(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:analyzeSyntax" % client.transport._host, args[1])
-
-
-def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-def test_analyze_syntax_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.ClassifyTextRequest,
-  dict,
-])
-def test_classify_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ClassifyTextResponse()
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.classify_text(request)
-
-    # Establish that the response is the type that we expect.
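-    # ClassifyTextResponse exposes no primitive fields for the generator to
-    # spot-check, so asserting the response type is the entire verification.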
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.ClassifyTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.classify_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_classify_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.classify_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_classify_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse())
-
-        request = language_service.ClassifyTextRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.ClassifyTextResponse()
-
-        client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.classify_text(request)
-
-
-def test_classify_text_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ClassifyTextResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ClassifyTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.classify_text(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:classifyText" % client.transport._host, args[1])
-
-
-def test_classify_text_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-def test_classify_text_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.ModerateTextRequest,
-  dict,
-])
-def test_moderate_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ModerateTextResponse()
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.moderate_text(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.ModerateTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.moderate_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_moderate_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.moderate_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_moderate_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse())
-
-        request = language_service.ModerateTextRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.ModerateTextResponse()
-
-        client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.moderate_text(request)
-
-
-def test_moderate_text_rest_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.ModerateTextResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.ModerateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.moderate_text(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/documents:moderateText" % client.transport._host, args[1])
-
-
-def test_moderate_text_rest_flattened_error(transport: str = 'rest'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-def test_moderate_text_rest_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-  language_service.AnnotateTextRequest,
-  dict,
-])
-def test_annotate_text_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnnotateTextResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.annotate_text(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest):
-    transport_class = transports.LanguageServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = language_service.AnnotateTextResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = language_service.AnnotateTextResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.annotate_text(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_annotate_text_rest_unset_required_fields():
-    transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.annotate_text._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("document", "features", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_annotate_text_rest_interceptors(null_interceptor):
-    transport = transports.LanguageServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(),
-        )
-    client = LanguageServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \
-         mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse())
-
-        request = language_service.AnnotateTextRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = language_service.AnnotateTextResponse()
-
-        client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.annotate_text(request) - - -def test_annotate_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.annotate_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/documents:annotateText" % client.transport._host, args[1]) - - -def test_annotate_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.annotate_text( - language_service.AnnotateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_annotate_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
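# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): the
# calling convention the flattened tests above exercise. Callers pass
# EITHER a request object OR flattened fields; mixing the two raises
# ValueError. Running this requires real credentials; values are arbitrary.
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()
response = client.annotate_text(
    document=language_v1.Document(
        content="Hello, world!",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    ),
    features=language_v1.AnnotateTextRequest.Features(extract_syntax=True),
    encoding_type=language_v1.EncodingType.UTF8,
)
# --------------------------------------------------------------------------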
- transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LanguageServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LanguageServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = LanguageServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LanguageServiceGrpcTransport, - ) - -def test_language_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_language_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
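# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): what the
# transport tests above assert in practice. A pre-built transport carries
# its own credentials, so the client must not also be given credentials,
# a credentials file, or scopes.
from google.auth import credentials as ga_credentials
from google.cloud.language_v1.services.language_service import (
    LanguageServiceClient,
    transports,
)

transport = transports.LanguageServiceGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
client = LanguageServiceClient(transport=transport)  # credentials live on the transport
assert client.transport is transport
# --------------------------------------------------------------------------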
- methods = ( - 'analyze_sentiment', - 'analyze_entities', - 'analyze_entity_sentiment', - 'analyze_syntax', - 'classify_text', - 'moderate_text', - 'annotate_text', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_language_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport() - adc.assert_called_once() - - -def test_language_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LanguageServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_language_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
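# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): resolving
# Application Default Credentials with the service's scopes, which is what
# the assertions above verify the transport does internally. Requires ADC
# to be configured in the environment at runtime.
import google.auth

credentials, project_id = google.auth.default(
    scopes=[
        "https://www.googleapis.com/auth/cloud-language",
        "https://www.googleapis.com/auth/cloud-platform",
    ],
)
# --------------------------------------------------------------------------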
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, - ], -) -def test_language_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LanguageServiceGrpcTransport, grpc_helpers), - (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_language_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "language.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="language.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
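# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): supplying
# an mTLS client certificate via a callback, the path exercised by the test
# below. The PEM file names are hypothetical; the callback must return a
# (certificate_bytes, private_key_bytes) tuple and is invoked when the
# transport builds its channel.
from google.auth import credentials as ga_credentials
from google.cloud.language_v1.services.language_service import transports

def load_client_cert():
    # Hypothetical loader; real code would read your own PEM material.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()

transport = transports.LanguageServiceGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    client_cert_source_for_mtls=load_client_cert,
)
# --------------------------------------------------------------------------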
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_language_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LanguageServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_no_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_with_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_language_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LanguageServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LanguageServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.analyze_sentiment._session - session2 = client2.transport.analyze_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_entities._session - session2 = client2.transport.analyze_entities._session - assert session1 != session2 - session1 = client1.transport.analyze_entity_sentiment._session - session2 = client2.transport.analyze_entity_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_syntax._session - session2 = client2.transport.analyze_syntax._session - assert session1 != session2 - session1 = client1.transport.classify_text._session - session2 = client2.transport.classify_text._session - assert session1 != session2 - session1 = client1.transport.moderate_text._session - session2 = client2.transport.moderate_text._session - assert session1 != session2 - session1 = client1.transport.annotate_text._session - session2 = client2.transport.annotate_text._session - assert session1 != session2 -def test_language_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
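# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): handing
# the transport a pre-built gRPC channel, as the channel tests here do.
# When a channel is supplied, the transport uses it as-is instead of
# creating its own; the host and port below are arbitrary examples.
import grpc
from google.cloud.language_v1.services.language_service import transports

channel = grpc.secure_channel("localhost:8443", grpc.local_channel_credentials())
transport = transports.LanguageServiceGrpcTransport(
    host="localhost:8443",
    channel=channel,
)
# --------------------------------------------------------------------------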
- transport = transports.LanguageServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_language_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LanguageServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LanguageServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = LanguageServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = LanguageServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = LanguageServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LanguageServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = LanguageServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = LanguageServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = LanguageServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
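# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): the
# resource-path helpers these tests cover. Each builder classmethod has a
# parser that inverts it; "my-project" is an arbitrary example.
from google.cloud import language_v1

path = language_v1.LanguageServiceClient.common_project_path("my-project")
assert path == "projects/my-project"
assert language_v1.LanguageServiceClient.parse_common_project_path(path) == {
    "project": "my-project",
}
# --------------------------------------------------------------------------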
- actual = LanguageServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LanguageServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = LanguageServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = LanguageServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
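# --------------------------------------------------------------------------
# Editor's sketch (illustrative, not part of the generated diff): the
# context-manager behavior asserted above. Leaving the block closes the
# underlying transport. Requires real credentials at runtime; the sample
# text is arbitrary.
from google.cloud import language_v1

with language_v1.LanguageServiceClient() as client:
    response = client.analyze_sentiment(
        document=language_v1.Document(
            content="I love this library!",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        ),
    )
# The transport (gRPC channel or HTTP session) is closed here.
# --------------------------------------------------------------------------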
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta2/.coveragerc b/owl-bot-staging/v1beta2/.coveragerc deleted file mode 100644 index c1f51536..00000000 --- a/owl-bot-staging/v1beta2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/language/__init__.py - google/cloud/language/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1beta2/.flake8 b/owl-bot-staging/v1beta2/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/v1beta2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1beta2/MANIFEST.in b/owl-bot-staging/v1beta2/MANIFEST.in deleted file mode 100644 index dcc097e7..00000000 --- a/owl-bot-staging/v1beta2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/language *.py -recursive-include google/cloud/language_v1beta2 *.py diff --git a/owl-bot-staging/v1beta2/README.rst b/owl-bot-staging/v1beta2/README.rst deleted file mode 100644 index 0c5f1b6b..00000000 --- a/owl-bot-staging/v1beta2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Language API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Language API. -4. 
`Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1beta2/docs/conf.py b/owl-bot-staging/v1beta2/docs/conf.py
deleted file mode 100644
index 2e1b322d..00000000
--- a/owl-bot-staging/v1beta2/docs/conf.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-language documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-language"
-copyright = u"2022, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-language-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-language.tex", - u"google-cloud-language Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-language", - u"Google Cloud Language Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-language", - u"google-cloud-language Documentation", - author, - "google-cloud-language", - "GAPIC library for Google Cloud Language API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta2/docs/index.rst b/owl-bot-staging/v1beta2/docs/index.rst deleted file mode 100644 index 42b8e680..00000000 --- a/owl-bot-staging/v1beta2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - language_v1beta2/services - language_v1beta2/types diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst deleted file mode 100644 index 799a7892..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/language_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -LanguageService ---------------------------------- - -.. automodule:: google.cloud.language_v1beta2.services.language_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst deleted file mode 100644 index 40ead585..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Language v1beta2 API -============================================== -.. toctree:: - :maxdepth: 2 - - language_service diff --git a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst b/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst deleted file mode 100644 index 2e834e61..00000000 --- a/owl-bot-staging/v1beta2/docs/language_v1beta2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Language v1beta2 API -=========================================== - -.. automodule:: google.cloud.language_v1beta2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language/__init__.py deleted file mode 100644 index 6bfa0911..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.language import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.language_v1beta2.services.language_service.client import LanguageServiceClient -from google.cloud.language_v1beta2.services.language_service.async_client import LanguageServiceAsyncClient - -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitiesResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeEntitySentimentResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeSentimentResponse -from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxRequest -from google.cloud.language_v1beta2.types.language_service import AnalyzeSyntaxResponse -from google.cloud.language_v1beta2.types.language_service import AnnotateTextRequest -from google.cloud.language_v1beta2.types.language_service import AnnotateTextResponse -from google.cloud.language_v1beta2.types.language_service import ClassificationCategory -from google.cloud.language_v1beta2.types.language_service import ClassificationModelOptions -from google.cloud.language_v1beta2.types.language_service import ClassifyTextRequest -from google.cloud.language_v1beta2.types.language_service import ClassifyTextResponse -from google.cloud.language_v1beta2.types.language_service import DependencyEdge -from google.cloud.language_v1beta2.types.language_service import Document -from google.cloud.language_v1beta2.types.language_service import Entity -from google.cloud.language_v1beta2.types.language_service import EntityMention -from google.cloud.language_v1beta2.types.language_service import ModerateTextRequest -from google.cloud.language_v1beta2.types.language_service import ModerateTextResponse -from google.cloud.language_v1beta2.types.language_service import PartOfSpeech -from google.cloud.language_v1beta2.types.language_service import Sentence -from google.cloud.language_v1beta2.types.language_service import Sentiment -from google.cloud.language_v1beta2.types.language_service import TextSpan -from google.cloud.language_v1beta2.types.language_service import Token -from google.cloud.language_v1beta2.types.language_service import EncodingType - -__all__ = ('LanguageServiceClient', - 'LanguageServiceAsyncClient', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- 
a/owl-bot-staging/v1beta2/google/cloud/language/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language/py.typed b/owl-bot-staging/v1beta2/google/cloud/language/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py deleted file mode 100644 index e6a87024..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.language_v1beta2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.language_service import LanguageServiceClient -from .services.language_service import LanguageServiceAsyncClient - -from .types.language_service import AnalyzeEntitiesRequest -from .types.language_service import AnalyzeEntitiesResponse -from .types.language_service import AnalyzeEntitySentimentRequest -from .types.language_service import AnalyzeEntitySentimentResponse -from .types.language_service import AnalyzeSentimentRequest -from .types.language_service import AnalyzeSentimentResponse -from .types.language_service import AnalyzeSyntaxRequest -from .types.language_service import AnalyzeSyntaxResponse -from .types.language_service import AnnotateTextRequest -from .types.language_service import AnnotateTextResponse -from .types.language_service import ClassificationCategory -from .types.language_service import ClassificationModelOptions -from .types.language_service import ClassifyTextRequest -from .types.language_service import ClassifyTextResponse -from .types.language_service import DependencyEdge -from .types.language_service import Document -from .types.language_service import Entity -from .types.language_service import EntityMention -from .types.language_service import ModerateTextRequest -from .types.language_service import ModerateTextResponse -from .types.language_service import PartOfSpeech -from .types.language_service import Sentence -from .types.language_service import Sentiment -from .types.language_service import TextSpan -from .types.language_service import Token -from .types.language_service import EncodingType - -__all__ = ( - 'LanguageServiceAsyncClient', -'AnalyzeEntitiesRequest', -'AnalyzeEntitiesResponse', -'AnalyzeEntitySentimentRequest', -'AnalyzeEntitySentimentResponse', -'AnalyzeSentimentRequest', -'AnalyzeSentimentResponse', -'AnalyzeSyntaxRequest', -'AnalyzeSyntaxResponse', -'AnnotateTextRequest', -'AnnotateTextResponse', -'ClassificationCategory', -'ClassificationModelOptions', -'ClassifyTextRequest', -'ClassifyTextResponse', -'DependencyEdge', -'Document', -'EncodingType', -'Entity', -'EntityMention', -'LanguageServiceClient', -'ModerateTextRequest', -'ModerateTextResponse', -'PartOfSpeech', -'Sentence', -'Sentiment', -'TextSpan', -'Token', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json deleted file mode 100644 index 85a901f9..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_metadata.json +++ /dev/null @@ -1,133 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.language_v1beta2", - "protoPackage": "google.cloud.language.v1beta2", - "schema": "1.0", - "services": { - "LanguageService": { - "clients": { - "grpc": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - }, - "grpc-async": { - 
"libraryClient": "LanguageServiceAsyncClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - }, - "rest": { - "libraryClient": "LanguageServiceClient", - "rpcs": { - "AnalyzeEntities": { - "methods": [ - "analyze_entities" - ] - }, - "AnalyzeEntitySentiment": { - "methods": [ - "analyze_entity_sentiment" - ] - }, - "AnalyzeSentiment": { - "methods": [ - "analyze_sentiment" - ] - }, - "AnalyzeSyntax": { - "methods": [ - "analyze_syntax" - ] - }, - "AnnotateText": { - "methods": [ - "annotate_text" - ] - }, - "ClassifyText": { - "methods": [ - "classify_text" - ] - }, - "ModerateText": { - "methods": [ - "moderate_text" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py deleted file mode 100644 index 405b1ceb..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed deleted file mode 100644 index c0acc99a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-language package uses inline types. diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py deleted file mode 100644 index e8e1c384..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py deleted file mode 100644 index 6e5f9052..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import LanguageServiceClient -from .async_client import LanguageServiceAsyncClient - -__all__ = ( - 'LanguageServiceClient', - 'LanguageServiceAsyncClient', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py deleted file mode 100644 index a3a46b78..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/async_client.py +++ /dev/null @@ -1,963 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.language_v1beta2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .client import LanguageServiceClient - - -class LanguageServiceAsyncClient: - """Provides text analysis operations such as sentiment analysis - and entity recognition. 
- """ - - _client: LanguageServiceClient - - DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(LanguageServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LanguageServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(LanguageServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(LanguageServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(LanguageServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(LanguageServiceClient.parse_common_organization_path) - common_project_path = staticmethod(LanguageServiceClient.common_project_path) - parse_common_project_path = staticmethod(LanguageServiceClient.parse_common_project_path) - common_location_path = staticmethod(LanguageServiceClient.common_location_path) - parse_common_location_path = staticmethod(LanguageServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceAsyncClient: The constructed client. - """ - return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return LanguageServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> LanguageServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LanguageServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the language service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.LanguageServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = LanguageServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def analyze_sentiment(self, - request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSentimentResponse: - r"""Analyzes the sentiment of the provided text. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_analyze_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]]): - The request object. The sentiment analysis request - message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): - The encoding type used by the API to - calculate sentence offsets for the - sentence sentiment. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeSentimentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_sentiment, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
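The generated sample above defines `sample_analyze_sentiment` as a coroutine but never runs it. A minimal sketch of driving it end to end, assuming Application Default Credentials are available in the environment:

.. code-block:: python

    import asyncio

    from google.cloud import language_v1beta2


    async def main():
        # The async client sends requests over a gRPC asyncio channel.
        client = language_v1beta2.LanguageServiceAsyncClient()

        document = language_v1beta2.Document(
            content="content_value",
            type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        )

        # Flattened-field form; the client coerces this into an
        # AnalyzeSentimentRequest internally.
        response = await client.analyze_sentiment(document=document)
        print(response.document_sentiment)


    asyncio.run(main())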
- return response - - async def analyze_entities(self, - request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_analyze_entities(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entities(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]]): - The request object. The entity analysis request message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_entities, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_entity_sentiment(self, - request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = await client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]]): - The request object. The entity-level sentiment analysis - request message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeEntitySentimentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_entity_sentiment, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_syntax(self, - request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_analyze_syntax(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = await client.analyze_syntax(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]]): - The request object. The syntax analysis request message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnalyzeSyntaxRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_syntax, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def classify_text(self, - request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ClassifyTextResponse: - r"""Classifies a document into categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_classify_text(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = await client.classify_text(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]]): - The request object. The document classification request - message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
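Every generated method performs the same `has_flattened_params` guard shown above: the `request` argument and the flattened field arguments are mutually exclusive. A minimal sketch of the two call styles, using `analyze_syntax`:

.. code-block:: python

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()
    document = language_v1beta2.Document(
        content="content_value",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )

    # Style 1: a fully populated request object.
    response = client.analyze_syntax(
        request=language_v1beta2.AnalyzeSyntaxRequest(
            document=document,
            encoding_type=language_v1beta2.EncodingType.UTF8,
        )
    )

    # Style 2: flattened fields, coerced into a request internally.
    response = client.analyze_syntax(
        document=document,
        encoding_type=language_v1beta2.EncodingType.UTF8,
    )

    # Passing `request` together with `document` or `encoding_type`
    # raises ValueError.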
- - Returns: - google.cloud.language_v1beta2.types.ClassifyTextResponse: - The document classification response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.ClassifyTextRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.classify_text, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def moderate_text(self, - request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ModerateTextResponse: - r"""Moderates a document for harmful and sensitive - categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_moderate_text(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ModerateTextRequest( - document=document, - ) - - # Make the request - response = await client.moderate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]]): - The request object. The document moderation request - message. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.ModerateTextResponse: - The document moderation response - message. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.ModerateTextRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.moderate_text, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def annotate_text(self, - request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - features: Optional[language_service.AnnotateTextRequest.Features] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnnotateTextResponse: - r"""A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - async def sample_annotate_text(): - # Create a client - client = language_v1beta2.LanguageServiceAsyncClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = await client.annotate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]]): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - document (:class:`google.cloud.language_v1beta2.types.Document`): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - features (:class:`google.cloud.language_v1beta2.types.AnnotateTextRequest.Features`): - Required. The enabled features. - This corresponds to the ``features`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (:class:`google.cloud.language_v1beta2.types.EncodingType`): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnnotateTextResponse: - The text annotations response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, features, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = language_service.AnnotateTextRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if features is not None: - request.features = features - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.annotate_text, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "LanguageServiceAsyncClient", -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py deleted file mode 100644 index 9093d5b6..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/client.py +++ /dev/null @@ -1,1116 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
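ModerateText is the RPC this change adds to the v1beta2 surface. A minimal sketch of calling it synchronously; the `moderation_categories` field is an assumption drawn from the ModerateTextResponse message defined elsewhere in this patch, where each category carries a name and a confidence score:

.. code-block:: python

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()
    document = language_v1beta2.Document(
        content="content_value",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )

    response = client.moderate_text(document=document)

    # Each harmful or sensitive category detected in the document
    # is reported with a confidence score (assumed response shape).
    for category in response.moderation_categories:
        print(category.name, category.confidence)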
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.language_v1beta2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import LanguageServiceGrpcTransport -from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .transports.rest import LanguageServiceRestTransport - - -class LanguageServiceClientMeta(type): - """Metaclass for the LanguageService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] - _transport_registry["grpc"] = LanguageServiceGrpcTransport - _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport - _transport_registry["rest"] = LanguageServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[LanguageServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class LanguageServiceClient(metaclass=LanguageServiceClientMeta): - """Provides text analysis operations such as sentiment analysis - and entity recognition. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "language.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> LanguageServiceTransport: - """Returns the transport used by the client instance. - - Returns: - LanguageServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LanguageServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the language service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, LanguageServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
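The endpoint resolution implemented above is driven entirely by two environment variables, and invalid values fail fast rather than being silently ignored. A minimal sketch of inspecting the outcome without constructing a client:

.. code-block:: python

    import os

    from google.cloud import language_v1beta2

    # Accepted values: "never", "auto" (default), "always".
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    # Accepted values: "true", "false" (default).
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"

    endpoint, cert_source = (
        language_v1beta2.LanguageServiceClient
        .get_mtls_endpoint_and_cert_source()
    )
    print(endpoint)     # language.mtls.googleapis.com
    print(cert_source)  # None, since client certificates are disabled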
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, LanguageServiceTransport): - # transport is a LanguageServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def analyze_sentiment(self, - request: Optional[Union[language_service.AnalyzeSentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSentimentResponse: - r"""Analyzes the sentiment of the provided text. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_analyze_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]): - The request object. 
The sentiment analysis request - message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate sentence offsets for the - sentence sentiment. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeSentimentRequest): - request = language_service.AnalyzeSentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_entities(self, - request: Optional[Union[language_service.AnalyzeEntitiesRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_analyze_entities(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitiesRequest( - document=document, - ) - - # Make the request - response = client.analyze_entities(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]): - The request object. The entity analysis request message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitiesRequest): - request = language_service.AnalyzeEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entities] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
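The generated sample above only prints the raw response. A minimal sketch of consuming the fields that AnalyzeEntitiesResponse exposes, assuming the same document setup as the sample:

.. code-block:: python

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient()
    document = language_v1beta2.Document(
        content="content_value",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )

    response = client.analyze_entities(
        document=document,
        encoding_type=language_v1beta2.EncodingType.UTF8,
    )

    # Salience (0.0 to 1.0) measures how central the entity is
    # to the document.
    for entity in response.entities:
        print(entity.name, entity.type_, entity.salience)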
- return response - - def analyze_entity_sentiment(self, - request: Optional[Union[language_service.AnalyzeEntitySentimentRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_analyze_entity_sentiment(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeEntitySentimentRequest( - document=document, - ) - - # Make the request - response = client.analyze_entity_sentiment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]): - The request object. The entity-level sentiment analysis - request message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeEntitySentimentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): - request = language_service.AnalyzeEntitySentimentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_syntax(self, - request: Optional[Union[language_service.AnalyzeSyntaxRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_analyze_syntax(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnalyzeSyntaxRequest( - document=document, - ) - - # Make the request - response = client.analyze_syntax(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]): - The request object. The syntax analysis request message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnalyzeSyntaxRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnalyzeSyntaxRequest): - request = language_service.AnalyzeSyntaxRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def classify_text(self, - request: Optional[Union[language_service.ClassifyTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ClassifyTextResponse: - r"""Classifies a document into categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_classify_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ClassifyTextRequest( - document=document, - ) - - # Make the request - response = client.classify_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]): - The request object. The document classification request - message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.ClassifyTextResponse: - The document classification response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ClassifyTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
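-        # A plain dict is also accepted and coerced into the protobuf request
-        # type below; a hedged sketch (the content string is a placeholder):
-        #   client.classify_text(request={"document": {"content": "text to classify"}})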
- if not isinstance(request, language_service.ClassifyTextRequest): - request = language_service.ClassifyTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.classify_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def moderate_text(self, - request: Optional[Union[language_service.ModerateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.ModerateTextResponse: - r"""Moderates a document for harmful and sensitive - categories. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_moderate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ModerateTextRequest( - document=document, - ) - - # Make the request - response = client.moderate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.ModerateTextRequest, dict]): - The request object. The document moderation request - message. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.ModerateTextResponse: - The document moderation response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([document]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.ModerateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.ModerateTextRequest): - request = language_service.ModerateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
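-        # For example, both forms below produce the same request (a hedged
-        # sketch; ``doc`` is a placeholder Document):
-        #   client.moderate_text(document=doc)
-        #   client.moderate_text(request=language_service.ModerateTextRequest(document=doc))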
- if document is not None: - request.document = document - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.moderate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def annotate_text(self, - request: Optional[Union[language_service.AnnotateTextRequest, dict]] = None, - *, - document: Optional[language_service.Document] = None, - features: Optional[language_service.AnnotateTextRequest.Features] = None, - encoding_type: Optional[language_service.EncodingType] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> language_service.AnnotateTextResponse: - r"""A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import language_v1beta2 - - def sample_annotate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.AnnotateTextRequest( - document=document, - ) - - # Make the request - response = client.annotate_text(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - document (google.cloud.language_v1beta2.types.Document): - Required. Input document. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features): - Required. The enabled features. - This corresponds to the ``features`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - encoding_type (google.cloud.language_v1beta2.types.EncodingType): - The encoding type used by the API to - calculate offsets. - - This corresponds to the ``encoding_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.language_v1beta2.types.AnnotateTextResponse: - The text annotations response - message. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
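-        # A hedged sketch of the flattened calling style, selecting features
-        # through the Features message (``doc`` is a placeholder Document):
-        #   features = language_service.AnnotateTextRequest.Features(
-        #       extract_entities=True,
-        #       extract_document_sentiment=True,
-        #   )
-        #   client.annotate_text(document=doc, features=features)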
- has_flattened_params = any([document, features, encoding_type]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a language_service.AnnotateTextRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, language_service.AnnotateTextRequest): - request = language_service.AnnotateTextRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if document is not None: - request.document = document - if features is not None: - request.features = features - if encoding_type is not None: - request.encoding_type = encoding_type - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.annotate_text] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "LanguageServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "LanguageServiceClient", -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py deleted file mode 100644 index 3cb6ab92..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LanguageServiceTransport -from .grpc import LanguageServiceGrpcTransport -from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport -from .rest import LanguageServiceRestTransport -from .rest import LanguageServiceRestInterceptor - - -# Compile a registry of transports. 
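-# The registry below maps the ``transport`` string accepted by the client
-# constructor to a concrete transport class. A hedged sketch of selecting a
-# transport explicitly (assuming default credentials are available in the
-# environment):
-#
-#   from google.cloud import language_v1beta2
-#   client = language_v1beta2.LanguageServiceClient(transport="rest")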
-_transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] -_transport_registry['grpc'] = LanguageServiceGrpcTransport -_transport_registry['grpc_asyncio'] = LanguageServiceGrpcAsyncIOTransport -_transport_registry['rest'] = LanguageServiceRestTransport - -__all__ = ( - 'LanguageServiceTransport', - 'LanguageServiceGrpcTransport', - 'LanguageServiceGrpcAsyncIOTransport', - 'LanguageServiceRestTransport', - 'LanguageServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py deleted file mode 100644 index 99ee1db2..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/base.py +++ /dev/null @@ -1,275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.language_v1beta2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.language_v1beta2.types import language_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class LanguageServiceTransport(abc.ABC): - """Abstract transport class for LanguageService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'language.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
-                be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply an audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use a self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
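-        # Most methods below get a default retry (exponential backoff on
-        # DeadlineExceeded and ServiceUnavailable) plus a 600s default
-        # timeout; moderate_text has no default retry. A hedged sketch of
-        # overriding both per call from the client side:
-        #   from google.api_core import retry as retries
-        #   client.analyze_sentiment(
-        #       request=request,
-        #       retry=retries.Retry(initial=0.5, maximum=10.0, multiplier=2.0),
-        #       timeout=30.0,
-        #   )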
-        self._wrapped_methods = {
-            self.analyze_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entities: gapic_v1.method.wrap_method(
-                self.analyze_entities,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
-                self.analyze_entity_sentiment,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.analyze_syntax: gapic_v1.method.wrap_method(
-                self.analyze_syntax,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.classify_text: gapic_v1.method.wrap_method(
-                self.classify_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.moderate_text: gapic_v1.method.wrap_method(
-                self.moderate_text,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.annotate_text: gapic_v1.method.wrap_method(
-                self.annotate_text,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-             Only call this method if the transport is NOT shared
-             with other clients; this may cause errors in other clients!
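-
-        A hedged sketch of the safer pattern: scope the transport to a single
-        client and rely on the client's context manager, which closes the
-        transport exactly once::
-
-            with LanguageServiceClient() as client:
-                client.analyze_sentiment(request=request)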
- """ - raise NotImplementedError() - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Union[ - language_service.AnalyzeSentimentResponse, - Awaitable[language_service.AnalyzeSentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Union[ - language_service.AnalyzeEntitiesResponse, - Awaitable[language_service.AnalyzeEntitiesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Union[ - language_service.AnalyzeEntitySentimentResponse, - Awaitable[language_service.AnalyzeEntitySentimentResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Union[ - language_service.AnalyzeSyntaxResponse, - Awaitable[language_service.AnalyzeSyntaxResponse] - ]]: - raise NotImplementedError() - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Union[ - language_service.ClassifyTextResponse, - Awaitable[language_service.ClassifyTextResponse] - ]]: - raise NotImplementedError() - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Union[ - language_service.ModerateTextResponse, - Awaitable[language_service.ModerateTextResponse] - ]]: - raise NotImplementedError() - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Union[ - language_service.AnnotateTextResponse, - Awaitable[language_service.AnnotateTextResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'LanguageServiceTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py deleted file mode 100644 index 48b7cd8b..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO - - -class LanguageServiceGrpcTransport(LanguageServiceTransport): - """gRPC backend transport for LanguageService. 
-
-    Provides text analysis operations such as sentiment analysis
-    and entity recognition.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials, and scopes.
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # Use the credentials which are saved.
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists.
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - ~.AnalyzeSentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - ~.AnalyzeEntitiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - ~.AnalyzeEntitySentimentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
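-        # The resulting callable is cached in ``self._stubs``, so repeated
-        # property accesses reuse one stub. A hedged sketch of invoking it
-        # directly on a transport instance (``doc`` is a placeholder Document):
-        #   stub = transport.analyze_entity_sentiment
-        #   response = stub(language_service.AnalyzeEntitySentimentRequest(document=doc))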
- if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - ~.AnalyzeSyntaxResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - ~.ClassifyTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - ~.ModerateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - Returns: - Callable[[~.AnnotateTextRequest], - ~.AnnotateTextResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'LanguageServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py deleted file mode 100644 index 710e8bb5..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.language_v1beta2.types import language_service -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import LanguageServiceGrpcTransport - - -class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport): - """gRPC AsyncIO backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'language.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'language.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials, and scopes.
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # Use the credentials which are saved.
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - Awaitable[language_service.AnalyzeSentimentResponse]]: - r"""Return a callable for the analyze sentiment method over gRPC. - - Analyzes the sentiment of the provided text. - - Returns: - Callable[[~.AnalyzeSentimentRequest], - Awaitable[~.AnalyzeSentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_sentiment' not in self._stubs: - self._stubs['analyze_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', - request_serializer=language_service.AnalyzeSentimentRequest.serialize, - response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, - ) - return self._stubs['analyze_sentiment'] - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - Awaitable[language_service.AnalyzeEntitiesResponse]]: - r"""Return a callable for the analyze entities method over gRPC. - - Finds named entities (currently proper names and - common nouns) in the text along with entity types, - salience, mentions for each entity, and other - properties. - - Returns: - Callable[[~.AnalyzeEntitiesRequest], - Awaitable[~.AnalyzeEntitiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entities' not in self._stubs: - self._stubs['analyze_entities'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', - request_serializer=language_service.AnalyzeEntitiesRequest.serialize, - response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, - ) - return self._stubs['analyze_entities'] - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - Awaitable[language_service.AnalyzeEntitySentimentResponse]]: - r"""Return a callable for the analyze entity sentiment method over gRPC. - - Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity - and its mentions. - - Returns: - Callable[[~.AnalyzeEntitySentimentRequest], - Awaitable[~.AnalyzeEntitySentimentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_entity_sentiment' not in self._stubs: - self._stubs['analyze_entity_sentiment'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', - request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, - response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, - ) - return self._stubs['analyze_entity_sentiment'] - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - Awaitable[language_service.AnalyzeSyntaxResponse]]: - r"""Return a callable for the analyze syntax method over gRPC. - - Analyzes the syntax of the text and provides sentence - boundaries and tokenization along with part of speech - tags, dependency trees, and other properties. - - Returns: - Callable[[~.AnalyzeSyntaxRequest], - Awaitable[~.AnalyzeSyntaxResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_syntax' not in self._stubs: - self._stubs['analyze_syntax'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', - request_serializer=language_service.AnalyzeSyntaxRequest.serialize, - response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, - ) - return self._stubs['analyze_syntax'] - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - Awaitable[language_service.ClassifyTextResponse]]: - r"""Return a callable for the classify text method over gRPC. - - Classifies a document into categories. - - Returns: - Callable[[~.ClassifyTextRequest], - Awaitable[~.ClassifyTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'classify_text' not in self._stubs: - self._stubs['classify_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ClassifyText', - request_serializer=language_service.ClassifyTextRequest.serialize, - response_deserializer=language_service.ClassifyTextResponse.deserialize, - ) - return self._stubs['classify_text'] - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - Awaitable[language_service.ModerateTextResponse]]: - r"""Return a callable for the moderate text method over gRPC. - - Moderates a document for harmful and sensitive - categories. - - Returns: - Callable[[~.ModerateTextRequest], - Awaitable[~.ModerateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'moderate_text' not in self._stubs: - self._stubs['moderate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/ModerateText', - request_serializer=language_service.ModerateTextRequest.serialize, - response_deserializer=language_service.ModerateTextResponse.deserialize, - ) - return self._stubs['moderate_text'] - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - Awaitable[language_service.AnnotateTextResponse]]: - r"""Return a callable for the annotate text method over gRPC. - - A convenience method that provides all syntax, - sentiment, entity, and classification features in one - call. - - Returns: - Callable[[~.AnnotateTextRequest], - Awaitable[~.AnnotateTextResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'annotate_text' not in self._stubs: - self._stubs['annotate_text'] = self.grpc_channel.unary_unary( - '/google.cloud.language.v1beta2.LanguageService/AnnotateText', - request_serializer=language_service.AnnotateTextRequest.serialize, - response_deserializer=language_service.AnnotateTextResponse.deserialize, - ) - return self._stubs['annotate_text'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'LanguageServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py deleted file mode 100644 index 9696c821..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/services/language_service/transports/rest.py +++ /dev/null @@ -1,1029 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
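The GrpcAsyncIO transport above builds each RPC callable lazily: the property creates a unary-unary stub on first access, caches it in ``self._stubs``, and returns the cached callable on every later access. From user code all of this is hidden behind the async client. A minimal sketch of driving one of these RPCs (assuming Application Default Credentials are available; the sample text is made up):

.. code-block:: python

    import asyncio

    from google.cloud import language_v1beta2


    async def main():
        # LanguageServiceAsyncClient uses LanguageServiceGrpcAsyncIOTransport by default.
        client = language_v1beta2.LanguageServiceAsyncClient()
        document = language_v1beta2.Document(
            content="The weather in Zurich is lovely today.",  # hypothetical input
            type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        )
        # Resolves to the cached analyze_sentiment stub shown above.
        response = await client.analyze_sentiment(document=document)
        print(response.document_sentiment.score)


    asyncio.run(main())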
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.language_v1beta2.types import language_service - -from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LanguageServiceRestInterceptor: - """Interceptor for LanguageService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LanguageServiceRestTransport. - - .. 
code-block:: python - class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): - def pre_analyze_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_entity_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_entity_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_sentiment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_sentiment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_syntax(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_syntax(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_annotate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_annotate_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_classify_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_classify_text(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_moderate_text(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_moderate_text(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) - client = LanguageServiceClient(transport=transport) - - - """ - def pre_analyze_entities(self, request: language_service.AnalyzeEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entities(self, response: language_service.AnalyzeEntitiesResponse) -> language_service.AnalyzeEntitiesResponse: - """Post-rpc interceptor for analyze_entities - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_entity_sentiment(self, request: language_service.AnalyzeEntitySentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_entity_sentiment(self, response: language_service.AnalyzeEntitySentimentResponse) -> language_service.AnalyzeEntitySentimentResponse: - """Post-rpc interceptor for analyze_entity_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. 
- """ - return response - def pre_analyze_sentiment(self, request: language_service.AnalyzeSentimentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_sentiment(self, response: language_service.AnalyzeSentimentResponse) -> language_service.AnalyzeSentimentResponse: - """Post-rpc interceptor for analyze_sentiment - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_analyze_syntax(self, request: language_service.AnalyzeSyntaxRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_analyze_syntax(self, response: language_service.AnalyzeSyntaxResponse) -> language_service.AnalyzeSyntaxResponse: - """Post-rpc interceptor for analyze_syntax - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_annotate_text(self, request: language_service.AnnotateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for annotate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_annotate_text(self, response: language_service.AnnotateTextResponse) -> language_service.AnnotateTextResponse: - """Post-rpc interceptor for annotate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_classify_text(self, request: language_service.ClassifyTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for classify_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. - """ - return request, metadata - - def post_classify_text(self, response: language_service.ClassifyTextResponse) -> language_service.ClassifyTextResponse: - """Post-rpc interceptor for classify_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - def pre_moderate_text(self, request: language_service.ModerateTextRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[language_service.ModerateTextRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for moderate_text - - Override in a subclass to manipulate the request or metadata - before they are sent to the LanguageService server. 
- """ - return request, metadata - - def post_moderate_text(self, response: language_service.ModerateTextResponse) -> language_service.ModerateTextResponse: - """Post-rpc interceptor for moderate_text - - Override in a subclass to manipulate the response - after it is returned by the LanguageService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LanguageServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LanguageServiceRestInterceptor - - -class LanguageServiceRestTransport(LanguageServiceTransport): - """REST backend transport for LanguageService. - - Provides text analysis operations such as sentiment analysis - and entity recognition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'language.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LanguageServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LanguageServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _AnalyzeEntities(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitiesResponse: - r"""Call the analyze entities method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitiesRequest): - The request object. The entity analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitiesResponse: - The entity analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeEntities', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entities(request, metadata) - pb_request = language_service.AnalyzeEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitiesResponse() - pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entities(resp) - return resp - - class _AnalyzeEntitySentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeEntitySentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeEntitySentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeEntitySentimentResponse: - r"""Call the analyze entity sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeEntitySentimentRequest): - The request object. The entity-level sentiment analysis - request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeEntitySentimentResponse: - The entity-level sentiment analysis - response message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeEntitySentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_entity_sentiment(request, metadata) - pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
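Every REST stub in this file follows the same shape: run the pre-RPC interceptor, transcode the protobuf request against its ``http_options``, POST the JSON body, then parse the JSON response back into a protobuf and run the post-RPC interceptor. Stripped of the generated plumbing, the wire exchange for ``AnalyzeEntities`` looks roughly like the hand-written sketch below (not the generated code path; the document content is made up, and field names use the camelCase REST/JSON mapping):

.. code-block:: python

    import google.auth
    from google.auth.transport.requests import AuthorizedSession

    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    session = AuthorizedSession(credentials)

    body = {
        "document": {"type": "PLAIN_TEXT", "content": "Ada Lovelace was born in London."},
        "encodingType": "UTF8",
    }
    response = session.post(
        "https://language.googleapis.com/v1beta2/documents:analyzeEntities",
        json=body,
    )
    response.raise_for_status()
    for entity in response.json().get("entities", []):
        print(entity["name"], entity["type"])

The generated transport differs mainly in that it serializes via ``json_format``, forces ``$alt=json;enum-encoding=int``, and surfaces HTTP errors as ``GoogleAPICallError`` subclasses instead of ``requests`` exceptions.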
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeEntitySentimentResponse() - pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_entity_sentiment(resp) - return resp - - class _AnalyzeSentiment(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSentiment") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSentimentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSentimentResponse: - r"""Call the analyze sentiment method over HTTP. - - Args: - request (~.language_service.AnalyzeSentimentRequest): - The request object. The sentiment analysis request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSentimentResponse: - The sentiment analysis response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeSentiment', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_sentiment(request, metadata) - pb_request = language_service.AnalyzeSentimentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSentimentResponse() - pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_sentiment(resp) - return resp - - class _AnalyzeSyntax(LanguageServiceRestStub): - def __hash__(self): - return hash("AnalyzeSyntax") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnalyzeSyntaxRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnalyzeSyntaxResponse: - r"""Call the analyze syntax method over HTTP. - - Args: - request (~.language_service.AnalyzeSyntaxRequest): - The request object. The syntax analysis request message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnalyzeSyntaxResponse: - The syntax analysis response message. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:analyzeSyntax', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) - pb_request = language_service.AnalyzeSyntaxRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnalyzeSyntaxResponse() - pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_analyze_syntax(resp) - return resp - - class _AnnotateText(LanguageServiceRestStub): - def __hash__(self): - return hash("AnnotateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.AnnotateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.AnnotateTextResponse: - r"""Call the annotate text method over HTTP. - - Args: - request (~.language_service.AnnotateTextRequest): - The request object. The request message for the text - annotation API, which can perform - multiple analysis types (sentiment, - entities, and syntax) in one call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.AnnotateTextResponse: - The text annotations response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:annotateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_annotate_text(request, metadata) - pb_request = language_service.AnnotateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.AnnotateTextResponse() - pb_resp = language_service.AnnotateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_annotate_text(resp) - return resp - - class _ClassifyText(LanguageServiceRestStub): - def __hash__(self): - return hash("ClassifyText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ClassifyTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ClassifyTextResponse: - r"""Call the classify text method over HTTP. - - Args: - request (~.language_service.ClassifyTextRequest): - The request object. The document classification request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ClassifyTextResponse: - The document classification response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:classifyText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_classify_text(request, metadata) - pb_request = language_service.ClassifyTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ClassifyTextResponse() - pb_resp = language_service.ClassifyTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_classify_text(resp) - return resp - - class _ModerateText(LanguageServiceRestStub): - def __hash__(self): - return hash("ModerateText") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: language_service.ModerateTextRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> language_service.ModerateTextResponse: - r"""Call the moderate text method over HTTP. - - Args: - request (~.language_service.ModerateTextRequest): - The request object. The document moderation request - message. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.language_service.ModerateTextResponse: - The document moderation response - message. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta2/documents:moderateText', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_moderate_text(request, metadata) - pb_request = language_service.ModerateTextRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = language_service.ModerateTextResponse() - pb_resp = language_service.ModerateTextResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_moderate_text(resp) - return resp - - @property - def analyze_entities(self) -> Callable[ - [language_service.AnalyzeEntitiesRequest], - language_service.AnalyzeEntitiesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_entity_sentiment(self) -> Callable[ - [language_service.AnalyzeEntitySentimentRequest], - language_service.AnalyzeEntitySentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_sentiment(self) -> Callable[ - [language_service.AnalyzeSentimentRequest], - language_service.AnalyzeSentimentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_syntax(self) -> Callable[ - [language_service.AnalyzeSyntaxRequest], - language_service.AnalyzeSyntaxResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore - - @property - def annotate_text(self) -> Callable[ - [language_service.AnnotateTextRequest], - language_service.AnnotateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def classify_text(self) -> Callable[ - [language_service.ClassifyTextRequest], - language_service.ClassifyTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore - - @property - def moderate_text(self) -> Callable[ - [language_service.ModerateTextRequest], - language_service.ModerateTextResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ModerateText(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LanguageServiceRestTransport', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py deleted file mode 100644 index 8dadfa8a..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
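``ModerateText`` is the RPC this change introduces; over the REST transport it is served by the ``_ModerateText`` stub above at ``POST /v1beta2/documents:moderateText``, with ``pre_moderate_text``/``post_moderate_text`` as the interceptor hooks. A minimal client-side sketch (the REST transport is selected explicitly here, and the input text is hypothetical):

.. code-block:: python

    from google.cloud import language_v1beta2

    client = language_v1beta2.LanguageServiceClient(transport="rest")
    document = language_v1beta2.Document(
        content="Some user-generated text to screen.",  # hypothetical input
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )
    response = client.moderate_text(document=document)
    for category in response.moderation_categories:
        print(f"{category.name}: {category.confidence:.2f}")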
-# -from .language_service import ( - AnalyzeEntitiesRequest, - AnalyzeEntitiesResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeSyntaxRequest, - AnalyzeSyntaxResponse, - AnnotateTextRequest, - AnnotateTextResponse, - ClassificationCategory, - ClassificationModelOptions, - ClassifyTextRequest, - ClassifyTextResponse, - DependencyEdge, - Document, - Entity, - EntityMention, - ModerateTextRequest, - ModerateTextResponse, - PartOfSpeech, - Sentence, - Sentiment, - TextSpan, - Token, - EncodingType, -) - -__all__ = ( - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'DependencyEdge', - 'Document', - 'Entity', - 'EntityMention', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'PartOfSpeech', - 'Sentence', - 'Sentiment', - 'TextSpan', - 'Token', - 'EncodingType', -) diff --git a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py b/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py deleted file mode 100644 index 3b27605f..00000000 --- a/owl-bot-staging/v1beta2/google/cloud/language_v1beta2/types/language_service.py +++ /dev/null @@ -1,1761 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.language.v1beta2', - manifest={ - 'EncodingType', - 'Document', - 'Sentence', - 'Entity', - 'Token', - 'Sentiment', - 'PartOfSpeech', - 'DependencyEdge', - 'EntityMention', - 'TextSpan', - 'ClassificationCategory', - 'ClassificationModelOptions', - 'AnalyzeSentimentRequest', - 'AnalyzeSentimentResponse', - 'AnalyzeEntitySentimentRequest', - 'AnalyzeEntitySentimentResponse', - 'AnalyzeEntitiesRequest', - 'AnalyzeEntitiesResponse', - 'AnalyzeSyntaxRequest', - 'AnalyzeSyntaxResponse', - 'ClassifyTextRequest', - 'ClassifyTextResponse', - 'ModerateTextRequest', - 'ModerateTextResponse', - 'AnnotateTextRequest', - 'AnnotateTextResponse', - }, -) - - -class EncodingType(proto.Enum): - r"""Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens - and mentions, and languages that natively use different text - encodings may access offsets differently. - - Values: - NONE (0): - If ``EncodingType`` is not specified, encoding-dependent - information (such as ``begin_offset``) will be set at - ``-1``. 
- UTF8 (1): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-8 encoding of the input. C++ and - Go are examples of languages that use this encoding - natively. - UTF16 (2): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-16 encoding of the input. Java - and JavaScript are examples of languages that use this - encoding natively. - UTF32 (3): - Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-32 encoding of the input. Python - is an example of a language that uses this encoding - natively. - """ - NONE = 0 - UTF8 = 1 - UTF16 = 2 - UTF32 = 3 - - -class Document(proto.Message): - r"""Represents the input to API methods. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.language_v1beta2.types.Document.Type): - Required. If the type is not set or is ``TYPE_UNSPECIFIED``, - returns an ``INVALID_ARGUMENT`` error. - content (str): - The content of the input in string format. - Cloud audit logging exempt since it is based on - user data. - - This field is a member of `oneof`_ ``source``. - gcs_content_uri (str): - The Google Cloud Storage URI where the file content is - located. This URI must be of the form: - gs://bucket_name/object_name. For more details, see - https://cloud.google.com/storage/docs/reference-uris. NOTE: - Cloud Storage object versioning is not supported. - - This field is a member of `oneof`_ ``source``. - language (str): - The language of the document (if not specified, the language - is automatically detected). Both ISO and BCP-47 language - codes are accepted. `Language - Support `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or - automatically detected) is not supported by the called API - method, an ``INVALID_ARGUMENT`` error is returned. - reference_web_uri (str): - The web URI where the document comes from. - This URI is not used for fetching the content, - but as a hint for analyzing the document. - boilerplate_handling (google.cloud.language_v1beta2.types.Document.BoilerplateHandling): - Indicates how detected boilerplate(e.g. - advertisements, copyright declarations, banners) - should be handled for this document. If not - specified, boilerplate will be treated the same - as content. - """ - class Type(proto.Enum): - r"""The document types enum. - - Values: - TYPE_UNSPECIFIED (0): - The content type is not specified. - PLAIN_TEXT (1): - Plain text - HTML (2): - HTML - """ - TYPE_UNSPECIFIED = 0 - PLAIN_TEXT = 1 - HTML = 2 - - class BoilerplateHandling(proto.Enum): - r"""Ways of handling boilerplate detected in the document - - Values: - BOILERPLATE_HANDLING_UNSPECIFIED (0): - The boilerplate handling is not specified. - SKIP_BOILERPLATE (1): - Do not analyze detected boilerplate. - Reference web URI is required for detecting - boilerplate. - KEEP_BOILERPLATE (2): - Treat boilerplate the same as content. 
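The practical effect of ``EncodingType`` is easiest to see with a character outside the Basic Multilingual Plane, where the three encodings disagree about how far into the string a token starts. A quick illustration in plain Python, no API call involved:

.. code-block:: python

    text = "🙂 hello"
    prefix = text[: text.index("hello")]  # everything before the token "hello"

    print(len(prefix.encode("utf-8")))           # 5 -> begin_offset under EncodingType.UTF8
    print(len(prefix.encode("utf-16-le")) // 2)  # 3 -> begin_offset under EncodingType.UTF16
    print(len(prefix))                           # 2 -> begin_offset under EncodingType.UTF32

Python string indices count code points, so they line up with ``UTF32`` offsets; callers indexing into raw bytes or UTF-16 buffers (C++, Java, JavaScript) need the matching encoding type to use the returned offsets safely.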
- """ - BOILERPLATE_HANDLING_UNSPECIFIED = 0 - SKIP_BOILERPLATE = 1 - KEEP_BOILERPLATE = 2 - - type_: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - content: str = proto.Field( - proto.STRING, - number=2, - oneof='source', - ) - gcs_content_uri: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - language: str = proto.Field( - proto.STRING, - number=4, - ) - reference_web_uri: str = proto.Field( - proto.STRING, - number=5, - ) - boilerplate_handling: BoilerplateHandling = proto.Field( - proto.ENUM, - number=6, - enum=BoilerplateHandling, - ) - - -class Sentence(proto.Message): - r"""Represents a sentence in the input document. - - Attributes: - text (google.cloud.language_v1beta2.types.TextSpan): - The sentence text. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeSentiment][] or if - [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] - is set to true, this field will contain the sentiment for - the sentence. - """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=2, - message='Sentiment', - ) - - -class Entity(proto.Message): - r"""Represents a phrase in the text that is a known entity, such - as a person, an organization, or location. The API associates - information, such as salience and mentions, with entities. - - Attributes: - name (str): - The representative name for the entity. - type_ (google.cloud.language_v1beta2.types.Entity.Type): - The entity type. - metadata (MutableMapping[str, str]): - Metadata associated with the entity. - - For most entity types, the metadata is a Wikipedia URL - (``wikipedia_url``) and Knowledge Graph MID (``mid``), if - they are available. For the metadata associated with other - entity types, see the Type table below. - salience (float): - The salience score associated with the entity in the [0, - 1.0] range. - - The salience score for an entity provides information about - the importance or centrality of that entity to the entire - document text. Scores closer to 0 are less salient, while - scores closer to 1.0 are highly salient. - mentions (MutableSequence[google.cloud.language_v1beta2.types.EntityMention]): - The mentions of this entity in the input - document. The API currently supports proper noun - mentions. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the aggregate - sentiment expressed for this entity in the provided - document. - """ - class Type(proto.Enum): - r"""The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph - MID (``mid``). The table below lists the associated fields for - entities that have different metadata. 
- - Values: - UNKNOWN (0): - Unknown - PERSON (1): - Person - LOCATION (2): - Location - ORGANIZATION (3): - Organization - EVENT (4): - Event - WORK_OF_ART (5): - Artwork - CONSUMER_GOOD (6): - Consumer product - OTHER (7): - Other types of entities - PHONE_NUMBER (9): - Phone number - - The metadata lists the phone number, formatted according to - local convention, plus whichever additional elements appear - in the text: - - - ``number`` - the actual number, broken down into sections - as per local convention - - ``national_prefix`` - country code, if detected - - ``area_code`` - region or area code, if detected - - ``extension`` - phone extension (to be dialed after - connection), if detected - ADDRESS (10): - Address - - The metadata identifies the street number and locality plus - whichever additional elements appear in the text: - - - ``street_number`` - street number - - ``locality`` - city or town - - ``street_name`` - street/route name, if detected - - ``postal_code`` - postal code, if detected - - ``country`` - country, if detected< - - ``broad_region`` - administrative area, such as the - state, if detected - - ``narrow_region`` - smaller administrative area, such as - county, if detected - - ``sublocality`` - used in Asian addresses to demark a - district within a city, if detected - DATE (11): - Date - - The metadata identifies the components of the date: - - - ``year`` - four digit year, if detected - - ``month`` - two digit month number, if detected - - ``day`` - two digit day number, if detected - NUMBER (12): - Number - The metadata is the number itself. - PRICE (13): - Price - - The metadata identifies the ``value`` and ``currency``. - """ - UNKNOWN = 0 - PERSON = 1 - LOCATION = 2 - ORGANIZATION = 3 - EVENT = 4 - WORK_OF_ART = 5 - CONSUMER_GOOD = 6 - OTHER = 7 - PHONE_NUMBER = 9 - ADDRESS = 10 - DATE = 11 - NUMBER = 12 - PRICE = 13 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - salience: float = proto.Field( - proto.FLOAT, - number=4, - ) - mentions: MutableSequence['EntityMention'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='EntityMention', - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=6, - message='Sentiment', - ) - - -class Token(proto.Message): - r"""Represents the smallest syntactic building block of the text. - - Attributes: - text (google.cloud.language_v1beta2.types.TextSpan): - The token text. - part_of_speech (google.cloud.language_v1beta2.types.PartOfSpeech): - Parts of speech tag for this token. - dependency_edge (google.cloud.language_v1beta2.types.DependencyEdge): - Dependency tree parse for this token. - lemma (str): - `Lemma `__ - of the token. - """ - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - part_of_speech: 'PartOfSpeech' = proto.Field( - proto.MESSAGE, - number=2, - message='PartOfSpeech', - ) - dependency_edge: 'DependencyEdge' = proto.Field( - proto.MESSAGE, - number=3, - message='DependencyEdge', - ) - lemma: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Sentiment(proto.Message): - r"""Represents the feeling associated with the entire text or - entities in the text. 
- Next ID: 6 - - Attributes: - magnitude (float): - A non-negative number in the [0, +inf) range, which - represents the absolute magnitude of sentiment regardless of - score (positive or negative). - score (float): - Sentiment score between -1.0 (negative - sentiment) and 1.0 (positive sentiment). - """ - - magnitude: float = proto.Field( - proto.FLOAT, - number=2, - ) - score: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class PartOfSpeech(proto.Message): - r"""Represents part of speech information for a token. - - Attributes: - tag (google.cloud.language_v1beta2.types.PartOfSpeech.Tag): - The part of speech tag. - aspect (google.cloud.language_v1beta2.types.PartOfSpeech.Aspect): - The grammatical aspect. - case (google.cloud.language_v1beta2.types.PartOfSpeech.Case): - The grammatical case. - form (google.cloud.language_v1beta2.types.PartOfSpeech.Form): - The grammatical form. - gender (google.cloud.language_v1beta2.types.PartOfSpeech.Gender): - The grammatical gender. - mood (google.cloud.language_v1beta2.types.PartOfSpeech.Mood): - The grammatical mood. - number (google.cloud.language_v1beta2.types.PartOfSpeech.Number): - The grammatical number. - person (google.cloud.language_v1beta2.types.PartOfSpeech.Person): - The grammatical person. - proper (google.cloud.language_v1beta2.types.PartOfSpeech.Proper): - The grammatical properness. - reciprocity (google.cloud.language_v1beta2.types.PartOfSpeech.Reciprocity): - The grammatical reciprocity. - tense (google.cloud.language_v1beta2.types.PartOfSpeech.Tense): - The grammatical tense. - voice (google.cloud.language_v1beta2.types.PartOfSpeech.Voice): - The grammatical voice. - """ - class Tag(proto.Enum): - r"""The part of speech tags enum. - - Values: - UNKNOWN (0): - Unknown - ADJ (1): - Adjective - ADP (2): - Adposition (preposition and postposition) - ADV (3): - Adverb - CONJ (4): - Conjunction - DET (5): - Determiner - NOUN (6): - Noun (common and proper) - NUM (7): - Cardinal number - PRON (8): - Pronoun - PRT (9): - Particle or other function word - PUNCT (10): - Punctuation - VERB (11): - Verb (all tenses and modes) - X (12): - Other: foreign words, typos, abbreviations - AFFIX (13): - Affix - """ - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Aspect(proto.Enum): - r"""The characteristic of a verb that expresses time flow during - an event. - - Values: - ASPECT_UNKNOWN (0): - Aspect is not applicable in the analyzed - language or is not predicted. - PERFECTIVE (1): - Perfective - IMPERFECTIVE (2): - Imperfective - PROGRESSIVE (3): - Progressive - """ - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(proto.Enum): - r"""The grammatical function performed by a noun or pronoun in a - phrase, clause, or sentence. In some languages, other parts of - speech, such as adjective and determiner, take case inflection - in agreement with the noun. - - Values: - CASE_UNKNOWN (0): - Case is not applicable in the analyzed - language or is not predicted. 
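The ``score`` and ``magnitude`` fields defined above are easy to conflate: score is the overall polarity in [-1.0, 1.0], while magnitude accumulates emotional weight across the whole text, so a long document with strongly positive and strongly negative passages can have a near-zero score but a large magnitude. A small hand-rolled interpretation helper (the thresholds are illustrative only, not API semantics):

.. code-block:: python

    from google.cloud import language_v1beta2

    def describe(sentiment: language_v1beta2.Sentiment) -> str:
        # Thresholds below are arbitrary illustrations, not part of the API.
        if sentiment.score >= 0.25:
            return "clearly positive"
        if sentiment.score <= -0.25:
            return "clearly negative"
        if sentiment.magnitude >= 2.0:
            return "mixed (opposing sentiments cancel out)"
        return "neutral / low emotion"

    print(describe(language_v1beta2.Sentiment(score=-0.1, magnitude=3.4)))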
- ACCUSATIVE (1): - Accusative - ADVERBIAL (2): - Adverbial - COMPLEMENTIVE (3): - Complementive - DATIVE (4): - Dative - GENITIVE (5): - Genitive - INSTRUMENTAL (6): - Instrumental - LOCATIVE (7): - Locative - NOMINATIVE (8): - Nominative - OBLIQUE (9): - Oblique - PARTITIVE (10): - Partitive - PREPOSITIONAL (11): - Prepositional - REFLEXIVE_CASE (12): - Reflexive - RELATIVE_CASE (13): - Relative - VOCATIVE (14): - Vocative - """ - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(proto.Enum): - r"""Depending on the language, Form can be categorizing different - forms of verbs, adjectives, adverbs, etc. For example, - categorizing inflected endings of verbs and adjectives or - distinguishing between short and long forms of adjectives and - participles - - Values: - FORM_UNKNOWN (0): - Form is not applicable in the analyzed - language or is not predicted. - ADNOMIAL (1): - Adnomial - AUXILIARY (2): - Auxiliary - COMPLEMENTIZER (3): - Complementizer - FINAL_ENDING (4): - Final ending - GERUND (5): - Gerund - REALIS (6): - Realis - IRREALIS (7): - Irrealis - SHORT (8): - Short form - LONG (9): - Long form - ORDER (10): - Order form - SPECIFIC (11): - Specific form - """ - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(proto.Enum): - r"""Gender classes of nouns reflected in the behaviour of - associated words. - - Values: - GENDER_UNKNOWN (0): - Gender is not applicable in the analyzed - language or is not predicted. - FEMININE (1): - Feminine - MASCULINE (2): - Masculine - NEUTER (3): - Neuter - """ - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(proto.Enum): - r"""The grammatical feature of verbs, used for showing modality - and attitude. - - Values: - MOOD_UNKNOWN (0): - Mood is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_MOOD (1): - Conditional - IMPERATIVE (2): - Imperative - INDICATIVE (3): - Indicative - INTERROGATIVE (4): - Interrogative - JUSSIVE (5): - Jussive - SUBJUNCTIVE (6): - Subjunctive - """ - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(proto.Enum): - r"""Count distinctions. - - Values: - NUMBER_UNKNOWN (0): - Number is not applicable in the analyzed - language or is not predicted. - SINGULAR (1): - Singular - PLURAL (2): - Plural - DUAL (3): - Dual - """ - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(proto.Enum): - r"""The distinction between the speaker, second person, third - person, etc. - - Values: - PERSON_UNKNOWN (0): - Person is not applicable in the analyzed - language or is not predicted. - FIRST (1): - First - SECOND (2): - Second - THIRD (3): - Third - REFLEXIVE_PERSON (4): - Reflexive - """ - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(proto.Enum): - r"""This category shows if the token is part of a proper name. - - Values: - PROPER_UNKNOWN (0): - Proper is not applicable in the analyzed - language or is not predicted. 
- PROPER (1): - Proper - NOT_PROPER (2): - Not proper - """ - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(proto.Enum): - r"""Reciprocal features of a pronoun. - - Values: - RECIPROCITY_UNKNOWN (0): - Reciprocity is not applicable in the analyzed - language or is not predicted. - RECIPROCAL (1): - Reciprocal - NON_RECIPROCAL (2): - Non-reciprocal - """ - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tense(proto.Enum): - r"""Time reference. - - Values: - TENSE_UNKNOWN (0): - Tense is not applicable in the analyzed - language or is not predicted. - CONDITIONAL_TENSE (1): - Conditional - FUTURE (2): - Future - PAST (3): - Past - PRESENT (4): - Present - IMPERFECT (5): - Imperfect - PLUPERFECT (6): - Pluperfect - """ - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(proto.Enum): - r"""The relationship between the action that a verb expresses and - the participants identified by its arguments. - - Values: - VOICE_UNKNOWN (0): - Voice is not applicable in the analyzed - language or is not predicted. - ACTIVE (1): - Active - CAUSATIVE (2): - Causative - PASSIVE (3): - Passive - """ - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 - - tag: Tag = proto.Field( - proto.ENUM, - number=1, - enum=Tag, - ) - aspect: Aspect = proto.Field( - proto.ENUM, - number=2, - enum=Aspect, - ) - case: Case = proto.Field( - proto.ENUM, - number=3, - enum=Case, - ) - form: Form = proto.Field( - proto.ENUM, - number=4, - enum=Form, - ) - gender: Gender = proto.Field( - proto.ENUM, - number=5, - enum=Gender, - ) - mood: Mood = proto.Field( - proto.ENUM, - number=6, - enum=Mood, - ) - number: Number = proto.Field( - proto.ENUM, - number=7, - enum=Number, - ) - person: Person = proto.Field( - proto.ENUM, - number=8, - enum=Person, - ) - proper: Proper = proto.Field( - proto.ENUM, - number=9, - enum=Proper, - ) - reciprocity: Reciprocity = proto.Field( - proto.ENUM, - number=10, - enum=Reciprocity, - ) - tense: Tense = proto.Field( - proto.ENUM, - number=11, - enum=Tense, - ) - voice: Voice = proto.Field( - proto.ENUM, - number=12, - enum=Voice, - ) - - -class DependencyEdge(proto.Message): - r"""Represents dependency parse tree information for a token. - - Attributes: - head_token_index (int): - Represents the head of this token in the dependency tree. - This is the index of the token which has an arc going to - this token. The index is the position of the token in the - array of tokens returned by the API method. If this token is - a root token, then the ``head_token_index`` is its own - index. - label (google.cloud.language_v1beta2.types.DependencyEdge.Label): - The parse label for the token. - """ - class Label(proto.Enum): - r"""The parse label enum for the token. 
- - Values: - UNKNOWN (0): - Unknown - ABBREV (1): - Abbreviation modifier - ACOMP (2): - Adjectival complement - ADVCL (3): - Adverbial clause modifier - ADVMOD (4): - Adverbial modifier - AMOD (5): - Adjectival modifier of an NP - APPOS (6): - Appositional modifier of an NP - ATTR (7): - Attribute dependent of a copular verb - AUX (8): - Auxiliary (non-main) verb - AUXPASS (9): - Passive auxiliary - CC (10): - Coordinating conjunction - CCOMP (11): - Clausal complement of a verb or adjective - CONJ (12): - Conjunct - CSUBJ (13): - Clausal subject - CSUBJPASS (14): - Clausal passive subject - DEP (15): - Dependency (unable to determine) - DET (16): - Determiner - DISCOURSE (17): - Discourse - DOBJ (18): - Direct object - EXPL (19): - Expletive - GOESWITH (20): - Goes with (part of a word in a text not well - edited) - IOBJ (21): - Indirect object - MARK (22): - Marker (word introducing a subordinate - clause) - MWE (23): - Multi-word expression - MWV (24): - Multi-word verbal expression - NEG (25): - Negation modifier - NN (26): - Noun compound modifier - NPADVMOD (27): - Noun phrase used as an adverbial modifier - NSUBJ (28): - Nominal subject - NSUBJPASS (29): - Passive nominal subject - NUM (30): - Numeric modifier of a noun - NUMBER (31): - Element of compound number - P (32): - Punctuation mark - PARATAXIS (33): - Parataxis relation - PARTMOD (34): - Participial modifier - PCOMP (35): - The complement of a preposition is a clause - POBJ (36): - Object of a preposition - POSS (37): - Possession modifier - POSTNEG (38): - Postverbal negative particle - PRECOMP (39): - Predicate complement - PRECONJ (40): - Preconjunt - PREDET (41): - Predeterminer - PREF (42): - Prefix - PREP (43): - Prepositional modifier - PRONL (44): - The relationship between a verb and verbal - morpheme - PRT (45): - Particle - PS (46): - Associative or possessive marker - QUANTMOD (47): - Quantifier phrase modifier - RCMOD (48): - Relative clause modifier - RCMODREL (49): - Complementizer in relative clause - RDROP (50): - Ellipsis without a preceding predicate - REF (51): - Referent - REMNANT (52): - Remnant - REPARANDUM (53): - Reparandum - ROOT (54): - Root - SNUM (55): - Suffix specifying a unit of number - SUFF (56): - Suffix - TMOD (57): - Temporal modifier - TOPIC (58): - Topic marker - VMOD (59): - Clause headed by an infinite form of the verb - that modifies a noun - VOCATIVE (60): - Vocative - XCOMP (61): - Open clausal complement - SUFFIX (62): - Name suffix - TITLE (63): - Name title - ADVPHMOD (64): - Adverbial phrase modifier - AUXCAUS (65): - Causative auxiliary - AUXVV (66): - Helper auxiliary - DTMOD (67): - Rentaishi (Prenominal modifier) - FOREIGN (68): - Foreign words - KW (69): - Keyword - LIST (70): - List for chains of comparable items - NOMC (71): - Nominalized clause - NOMCSUBJ (72): - Nominalized clausal subject - NOMCSUBJPASS (73): - Nominalized clausal passive - NUMC (74): - Compound of numeric modifier - COP (75): - Copula - DISLOCATED (76): - Dislocated relation (for fronted/topicalized - elements) - ASP (77): - Aspect marker - GMOD (78): - Genitive modifier - GOBJ (79): - Genitive object - INFMOD (80): - Infinitival modifier - MES (81): - Measure - NCOMP (82): - Nominal complement of a noun - """ - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - 
MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - head_token_index: int = proto.Field( - proto.INT32, - number=1, - ) - label: Label = proto.Field( - proto.ENUM, - number=2, - enum=Label, - ) - - -class EntityMention(proto.Message): - r"""Represents a mention for an entity in the text. Currently, - proper noun mentions are supported. - - Attributes: - text (google.cloud.language_v1beta2.types.TextSpan): - The mention text. - type_ (google.cloud.language_v1beta2.types.EntityMention.Type): - The type of the entity mention. - sentiment (google.cloud.language_v1beta2.types.Sentiment): - For calls to [AnalyzeEntitySentiment][] or if - [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the sentiment - expressed for this mention of the entity in the provided - document. - """ - class Type(proto.Enum): - r"""The supported types of mentions. - - Values: - TYPE_UNKNOWN (0): - Unknown - PROPER (1): - Proper name - COMMON (2): - Common noun (or noun compound) - """ - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - text: 'TextSpan' = proto.Field( - proto.MESSAGE, - number=1, - message='TextSpan', - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - sentiment: 'Sentiment' = proto.Field( - proto.MESSAGE, - number=3, - message='Sentiment', - ) - - -class TextSpan(proto.Message): - r"""Represents an output piece of text. - - Attributes: - content (str): - The content of the output text. - begin_offset (int): - The API calculates the beginning offset of the content in - the original document according to the - [EncodingType][google.cloud.language.v1beta2.EncodingType] - specified in the API request. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - begin_offset: int = proto.Field( - proto.INT32, - number=2, - ) - - -class ClassificationCategory(proto.Message): - r"""Represents a category returned from the text classifier. - - Attributes: - name (str): - The name of the category representing the - document. - confidence (float): - The classifier's confidence of the category. - Number represents how certain the classifier is - that this category represents the given text. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - confidence: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class ClassificationModelOptions(proto.Message): - r"""Model options available for classification requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
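Taken together, ``PartOfSpeech`` and ``DependencyEdge`` describe one parse-tree node per token: ``head_token_index`` points back into the same ``tokens`` array returned by ``analyze_syntax``, and a root token points at itself. A minimal sketch of walking that structure with this client (illustrative only; the function name and sample sentence are placeholders):

from google.cloud import language_v1beta2

def print_dependency_tree(sample_text: str = "The quick brown fox jumped.") -> None:
    client = language_v1beta2.LanguageServiceClient()
    document = language_v1beta2.Document(
        content=sample_text,
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )
    response = client.analyze_syntax(request={"document": document})
    for i, token in enumerate(response.tokens):
        head = token.dependency_edge.head_token_index
        # A root token's head index is its own position in the array.
        head_text = "ROOT" if head == i else response.tokens[head].text.content
        print(f"{token.text.content} ({token.part_of_speech.tag.name}) "
              f"--{token.dependency_edge.label.name}--> {head_text}")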
-class EntityMention(proto.Message):
-    r"""Represents a mention for an entity in the text. Currently,
-    proper noun mentions are supported.
-
-    Attributes:
-        text (google.cloud.language_v1beta2.types.TextSpan):
-            The mention text.
-        type_ (google.cloud.language_v1beta2.types.EntityMention.Type):
-            The type of the entity mention.
-        sentiment (google.cloud.language_v1beta2.types.Sentiment):
-            For calls to [AnalyzeEntitySentiment][] or if
-            [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment]
-            is set to true, this field will contain the sentiment
-            expressed for this mention of the entity in the provided
-            document.
-    """
-    class Type(proto.Enum):
-        r"""The supported types of mentions.
-
-        Values:
-            TYPE_UNKNOWN (0):
-                Unknown
-            PROPER (1):
-                Proper name
-            COMMON (2):
-                Common noun (or noun compound)
-        """
-        TYPE_UNKNOWN = 0
-        PROPER = 1
-        COMMON = 2
-
-    text: 'TextSpan' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='TextSpan',
-    )
-    type_: Type = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=Type,
-    )
-    sentiment: 'Sentiment' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='Sentiment',
-    )
-
-
-class TextSpan(proto.Message):
-    r"""Represents an output piece of text.
-
-    Attributes:
-        content (str):
-            The content of the output text.
-        begin_offset (int):
-            The API calculates the beginning offset of the content in
-            the original document according to the
-            [EncodingType][google.cloud.language.v1beta2.EncodingType]
-            specified in the API request.
-    """
-
-    content: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    begin_offset: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-
-
-class ClassificationCategory(proto.Message):
-    r"""Represents a category returned from the text classifier.
-
-    Attributes:
-        name (str):
-            The name of the category representing the
-            document.
-        confidence (float):
-            The classifier's confidence of the category.
-            Number represents how certain the classifier is
-            that this category represents the given text.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    confidence: float = proto.Field(
-        proto.FLOAT,
-        number=2,
-    )
-
-
-class ClassificationModelOptions(proto.Message):
-    r"""Model options available for classification requests.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        v1_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V1Model):
-            Setting this field will use the V1 model and
-            V1 content categories version. The V1 model is a
-            legacy model; support for this will be
-            discontinued in the future.
-
-            This field is a member of `oneof`_ ``model_type``.
-        v2_model (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model):
-            Setting this field will use the V2 model with
-            the appropriate content categories version. The
-            V2 model is a better performing model.
-
-            This field is a member of `oneof`_ ``model_type``.
-    """
-
-    class V1Model(proto.Message):
-        r"""Options for the V1 model.
-        """
-
-    class V2Model(proto.Message):
-        r"""Options for the V2 model.
-
-        Attributes:
-            content_categories_version (google.cloud.language_v1beta2.types.ClassificationModelOptions.V2Model.ContentCategoriesVersion):
-                The content categories used for
-                classification.
-        """
-        class ContentCategoriesVersion(proto.Enum):
-            r"""The content categories used for classification.
-
-            Values:
-                CONTENT_CATEGORIES_VERSION_UNSPECIFIED (0):
-                    If ``ContentCategoriesVersion`` is not specified, this
-                    option will default to ``V1``.
-                V1 (1):
-                    Legacy content categories of our initial
-                    launch in 2017.
-                V2 (2):
-                    Updated content categories in 2022.
-            """
-            CONTENT_CATEGORIES_VERSION_UNSPECIFIED = 0
-            V1 = 1
-            V2 = 2
-
-        content_categories_version: 'ClassificationModelOptions.V2Model.ContentCategoriesVersion' = proto.Field(
-            proto.ENUM,
-            number=1,
-            enum='ClassificationModelOptions.V2Model.ContentCategoriesVersion',
-        )
-
-    v1_model: V1Model = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof='model_type',
-        message=V1Model,
-    )
-    v2_model: V2Model = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        oneof='model_type',
-        message=V2Model,
-    )
-
-
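Because ``v1_model`` and ``v2_model`` share the ``model_type`` oneof, setting one clears the other. A short sketch of requesting V2 classification with the 2022 categories (illustrative only; ``classify_with_v2`` is a made-up helper name):

from google.cloud import language_v1beta2

def classify_with_v2(text: str):
    client = language_v1beta2.LanguageServiceClient()
    # Selecting v2_model implicitly leaves v1_model unset (oneof semantics).
    options = language_v1beta2.ClassificationModelOptions(
        v2_model=language_v1beta2.ClassificationModelOptions.V2Model(
            content_categories_version=(
                language_v1beta2.ClassificationModelOptions
                .V2Model.ContentCategoriesVersion.V2
            ),
        ),
    )
    request = language_v1beta2.ClassifyTextRequest(
        document=language_v1beta2.Document(
            content=text,
            type_=language_v1beta2.Document.Type.PLAIN_TEXT,
        ),
        classification_model_options=options,
    )
    return client.classify_text(request=request)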
-class AnalyzeSentimentRequest(proto.Message):
-    r"""The sentiment analysis request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-            The encoding type used by the API to
-            calculate sentence offsets for the sentence
-            sentiment.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    encoding_type: 'EncodingType' = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum='EncodingType',
-    )
-
-
-class AnalyzeSentimentResponse(proto.Message):
-    r"""The sentiment analysis response message.
-
-    Attributes:
-        document_sentiment (google.cloud.language_v1beta2.types.Sentiment):
-            The overall sentiment of the input document.
-        language (str):
-            The language of the text, which will be the same as the
-            language specified in the request or, if not specified, the
-            automatically-detected language. See
-            [Document.language][google.cloud.language.v1beta2.Document.language]
-            field for more details.
-        sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]):
-            The sentiment for all the sentences in the
-            document.
-    """
-
-    document_sentiment: 'Sentiment' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Sentiment',
-    )
-    language: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    sentences: MutableSequence['Sentence'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message='Sentence',
-    )
-
-
-class AnalyzeEntitySentimentRequest(proto.Message):
-    r"""The entity-level sentiment analysis request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-            The encoding type used by the API to
-            calculate offsets.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    encoding_type: 'EncodingType' = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum='EncodingType',
-    )
-
-
-class AnalyzeEntitySentimentResponse(proto.Message):
-    r"""The entity-level sentiment analysis response message.
-
-    Attributes:
-        entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]):
-            The recognized entities in the input document
-            with associated sentiments.
-        language (str):
-            The language of the text, which will be the same as the
-            language specified in the request or, if not specified, the
-            automatically-detected language. See
-            [Document.language][google.cloud.language.v1beta2.Document.language]
-            field for more details.
-    """
-
-    entities: MutableSequence['Entity'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Entity',
-    )
-    language: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class AnalyzeEntitiesRequest(proto.Message):
-    r"""The entity analysis request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-            The encoding type used by the API to
-            calculate offsets.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    encoding_type: 'EncodingType' = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum='EncodingType',
-    )
-
-
-class AnalyzeEntitiesResponse(proto.Message):
-    r"""The entity analysis response message.
-
-    Attributes:
-        entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]):
-            The recognized entities in the input
-            document.
-        language (str):
-            The language of the text, which will be the same as the
-            language specified in the request or, if not specified, the
-            automatically-detected language. See
-            [Document.language][google.cloud.language.v1beta2.Document.language]
-            field for more details.
-    """
-
-    entities: MutableSequence['Entity'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Entity',
-    )
-    language: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class AnalyzeSyntaxRequest(proto.Message):
-    r"""The syntax analysis request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-            The encoding type used by the API to
-            calculate offsets.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    encoding_type: 'EncodingType' = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum='EncodingType',
-    )
-
-
-class AnalyzeSyntaxResponse(proto.Message):
-    r"""The syntax analysis response message.
-
-    Attributes:
-        sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]):
-            Sentences in the input document.
-        tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]):
-            Tokens, along with their syntactic
-            information, in the input document.
-        language (str):
-            The language of the text, which will be the same as the
-            language specified in the request or, if not specified, the
-            automatically-detected language. See
-            [Document.language][google.cloud.language.v1beta2.Document.language]
-            field for more details.
-    """
-
-    sentences: MutableSequence['Sentence'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Sentence',
-    )
-    tokens: MutableSequence['Token'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message='Token',
-    )
-    language: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class ClassifyTextRequest(proto.Message):
-    r"""The document classification request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions):
-            Model options to use for classification.
-            Defaults to v1 options if not specified.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    classification_model_options: 'ClassificationModelOptions' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='ClassificationModelOptions',
-    )
-
-
-class ClassifyTextResponse(proto.Message):
-    r"""The document classification response message.
-
-    Attributes:
-        categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
-            Categories representing the input document.
-    """
-
-    categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='ClassificationCategory',
-    )
-
-
-class ModerateTextRequest(proto.Message):
-    r"""The document moderation request message.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-    """
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-
-
-class ModerateTextResponse(proto.Message):
-    r"""The document moderation response message.
-
-    Attributes:
-        moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
-            Harmful and sensitive categories representing
-            the input document.
-    """
-
-    moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='ClassificationCategory',
-    )
-
-
-class AnnotateTextRequest(proto.Message):
-    r"""The request message for the text annotation API, which can
-    perform multiple analysis types (sentiment, entities, and
-    syntax) in one call.
-
-    Attributes:
-        document (google.cloud.language_v1beta2.types.Document):
-            Required. Input document.
-        features (google.cloud.language_v1beta2.types.AnnotateTextRequest.Features):
-            Required. The enabled features.
-        encoding_type (google.cloud.language_v1beta2.types.EncodingType):
-            The encoding type used by the API to
-            calculate offsets.
-    """
-
-    class Features(proto.Message):
-        r"""All available features for sentiment, syntax, and semantic
-        analysis. Setting each one to true will enable that specific
-        analysis for the input. Next ID: 12
-
-        Attributes:
-            extract_syntax (bool):
-                Extract syntax information.
-            extract_entities (bool):
-                Extract entities.
-            extract_document_sentiment (bool):
-                Extract document-level sentiment.
-            extract_entity_sentiment (bool):
-                Extract entities and their associated
-                sentiment.
-            classify_text (bool):
-                Classify the full document into categories. If this is true,
-                the API will use the default model which classifies into a
-                `predefined
-                taxonomy <https://cloud.google.com/natural-language/docs/categories>`__.
-            moderate_text (bool):
-                Moderate the document for harmful and
-                sensitive categories.
-            classification_model_options (google.cloud.language_v1beta2.types.ClassificationModelOptions):
-                The model options to use for classification. Defaults to v1
-                options if not specified. Only used if ``classify_text`` is
-                set to true.
-        """
-
-        extract_syntax: bool = proto.Field(
-            proto.BOOL,
-            number=1,
-        )
-        extract_entities: bool = proto.Field(
-            proto.BOOL,
-            number=2,
-        )
-        extract_document_sentiment: bool = proto.Field(
-            proto.BOOL,
-            number=3,
-        )
-        extract_entity_sentiment: bool = proto.Field(
-            proto.BOOL,
-            number=4,
-        )
-        classify_text: bool = proto.Field(
-            proto.BOOL,
-            number=6,
-        )
-        moderate_text: bool = proto.Field(
-            proto.BOOL,
-            number=11,
-        )
-        classification_model_options: 'ClassificationModelOptions' = proto.Field(
-            proto.MESSAGE,
-            number=10,
-            message='ClassificationModelOptions',
-        )
-
-    document: 'Document' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Document',
-    )
-    features: Features = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=Features,
-    )
-    encoding_type: 'EncodingType' = proto.Field(
-        proto.ENUM,
-        number=3,
-        enum='EncodingType',
-    )
-
-
-class AnnotateTextResponse(proto.Message):
-    r"""The text annotations response message.
-
-    Attributes:
-        sentences (MutableSequence[google.cloud.language_v1beta2.types.Sentence]):
-            Sentences in the input document. Populated if the user
-            enables
-            [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
-        tokens (MutableSequence[google.cloud.language_v1beta2.types.Token]):
-            Tokens, along with their syntactic information, in the input
-            document. Populated if the user enables
-            [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
-        entities (MutableSequence[google.cloud.language_v1beta2.types.Entity]):
-            Entities, along with their semantic information, in the
-            input document. Populated if the user enables
-            [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities].
-        document_sentiment (google.cloud.language_v1beta2.types.Sentiment):
-            The overall sentiment for the document. Populated if the
-            user enables
-            [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment].
-        language (str):
-            The language of the text, which will be the same as the
-            language specified in the request or, if not specified, the
-            automatically-detected language. See
-            [Document.language][google.cloud.language.v1beta2.Document.language]
-            field for more details.
-        categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
-            Categories identified in the input document.
-        moderation_categories (MutableSequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
-            Harmful and sensitive categories identified
-            in the input document.
-    """
-
-    sentences: MutableSequence['Sentence'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Sentence',
-    )
-    tokens: MutableSequence['Token'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message='Token',
-    )
-    entities: MutableSequence['Entity'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message='Entity',
-    )
-    document_sentiment: 'Sentiment' = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message='Sentiment',
-    )
-    language: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=6,
-        message='ClassificationCategory',
-    )
-    moderation_categories: MutableSequence['ClassificationCategory'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=8,
-        message='ClassificationCategory',
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py deleted file mode 100644 index ef2d4a6d..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py
deleted file mode 100644
index ef2d4a6d..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeEntities
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_analyze_entities():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeEntitiesRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.analyze_entities(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py
deleted file mode 100644
index b8c2694b..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entities_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeEntities
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.

-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeEntities_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_analyze_entities():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeEntitiesRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.analyze_entities(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeEntities_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py
deleted file mode 100644
index 818d4209..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeEntitySentiment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_analyze_entity_sentiment():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeEntitySentimentRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.analyze_entity_sentiment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py
deleted file mode 100644
index cabc3ff5..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeEntitySentiment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_analyze_entity_sentiment():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeEntitySentimentRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.analyze_entity_sentiment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py
deleted file mode 100644
index b60e606a..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeSentiment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_analyze_sentiment():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeSentimentRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.analyze_sentiment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py
deleted file mode 100644
index df735913..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_sentiment_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeSentiment
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_analyze_sentiment():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeSentimentRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.analyze_sentiment(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py
deleted file mode 100644
index e42a0728..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeSyntax
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_analyze_syntax():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeSyntaxRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.analyze_syntax(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py
deleted file mode 100644
index f9ed77cc..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_analyze_syntax_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeSyntax
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_analyze_syntax():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnalyzeSyntaxRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.analyze_syntax(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py
deleted file mode 100644
index 5b17e2b1..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnnotateText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnnotateText_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_annotate_text():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnnotateTextRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.annotate_text(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnnotateText_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py
deleted file mode 100644
index 701c94e5..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_annotate_text_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnnotateText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_AnnotateText_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_annotate_text():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.AnnotateTextRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.annotate_text(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_AnnotateText_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py
deleted file mode 100644
index 94b5ebcc..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ClassifyText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_ClassifyText_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_classify_text():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.ClassifyTextRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.classify_text(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_ClassifyText_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py
deleted file mode 100644
index f9415093..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_classify_text_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ClassifyText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_ClassifyText_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-def sample_classify_text():
-    # Create a client
-    client = language_v1beta2.LanguageServiceClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.ClassifyTextRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = client.classify_text(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_ClassifyText_sync]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py
deleted file mode 100644
index d8385285..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ModerateText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_ModerateText_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import language_v1beta2
-
-
-async def sample_moderate_text():
-    # Create a client
-    client = language_v1beta2.LanguageServiceAsyncClient()
-
-    # Initialize request argument(s)
-    document = language_v1beta2.Document()
-    document.content = "content_value"
-
-    request = language_v1beta2.ModerateTextRequest(
-        document=document,
-    )
-
-    # Make the request
-    response = await client.moderate_text(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END language_v1beta2_generated_LanguageService_ModerateText_async]
diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py b/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py
deleted file mode 100644
index 78d11521..00000000
--- a/owl-bot-staging/v1beta2/samples/generated_samples/language_v1beta2_generated_language_service_moderate_text_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ModerateText
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-language
-
-
-# [START language_v1beta2_generated_LanguageService_ModerateText_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import language_v1beta2 - - -def sample_moderate_text(): - # Create a client - client = language_v1beta2.LanguageServiceClient() - - # Initialize request argument(s) - document = language_v1beta2.Document() - document.content = "content_value" - - request = language_v1beta2.ModerateTextRequest( - document=document, - ) - - # Make the request - response = client.moderate_text(request=request) - - # Handle the response - print(response) - -# [END language_v1beta2_generated_LanguageService_ModerateText_sync] diff --git a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json deleted file mode 100644 index fb6633f2..00000000 --- a/owl-bot-staging/v1beta2/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json +++ /dev/null @@ -1,1190 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.language.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-language", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entities", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1beta2_generated_language_service_analyze_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entities", - "method": { - "fullName": 
"google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse", - "shortName": "analyze_entities" - }, - "description": "Sample for AnalyzeEntities", - "file": "language_v1beta2_generated_language_service_analyze_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntities_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"language_v1beta2_generated_language_service_analyze_entity_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_entity_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeEntitySentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse", - "shortName": "analyze_entity_sentiment" - }, - "description": "Sample for AnalyzeEntitySentiment", - "file": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeEntitySentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_entity_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1beta2_generated_language_service_analyze_sentiment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_sentiment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_sentiment", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSentiment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSentimentRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSentimentResponse", - "shortName": "analyze_sentiment" - }, - "description": "Sample for AnalyzeSentiment", - "file": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSentiment_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_sentiment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1beta2_generated_language_service_analyze_syntax_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_syntax_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.analyze_syntax", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnalyzeSyntax" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse", - "shortName": "analyze_syntax" - }, - "description": "Sample for AnalyzeSyntax", - "file": "language_v1beta2_generated_language_service_analyze_syntax_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnalyzeSyntax_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_analyze_syntax_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1beta2_generated_language_service_annotate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_annotate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.annotate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.AnnotateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "AnnotateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "features", - "type": "google.cloud.language_v1beta2.types.AnnotateTextRequest.Features" - }, - { - "name": "encoding_type", - "type": "google.cloud.language_v1beta2.types.EncodingType" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.AnnotateTextResponse", - "shortName": "annotate_text" - }, - "description": "Sample for AnnotateText", - "file": "language_v1beta2_generated_language_service_annotate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_AnnotateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_annotate_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": 
"google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1beta2_generated_language_service_classify_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_classify_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.classify_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ClassifyText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ClassifyText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ClassifyTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ClassifyTextResponse", - "shortName": "classify_text" - }, - "description": "Sample for ClassifyText", - "file": "language_v1beta2_generated_language_service_classify_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ClassifyText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_classify_text_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient", - "shortName": "LanguageServiceAsyncClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceAsyncClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" - }, - { - "name": "document", - "type": 
"google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1beta2_generated_language_service_moderate_text_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_moderate_text_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient", - "shortName": "LanguageServiceClient" - }, - "fullName": "google.cloud.language_v1beta2.LanguageServiceClient.moderate_text", - "method": { - "fullName": "google.cloud.language.v1beta2.LanguageService.ModerateText", - "service": { - "fullName": "google.cloud.language.v1beta2.LanguageService", - "shortName": "LanguageService" - }, - "shortName": "ModerateText" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.language_v1beta2.types.ModerateTextRequest" - }, - { - "name": "document", - "type": "google.cloud.language_v1beta2.types.Document" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.language_v1beta2.types.ModerateTextResponse", - "shortName": "moderate_text" - }, - "description": "Sample for ModerateText", - "file": "language_v1beta2_generated_language_service_moderate_text_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "language_v1beta2_generated_LanguageService_ModerateText_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "language_v1beta2_generated_language_service_moderate_text_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py b/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py deleted file mode 100644 index 10fa218c..00000000 --- a/owl-bot-staging/v1beta2/scripts/fixup_language_v1beta2_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class languageCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_entities': ('document', 'encoding_type', ), - 'analyze_entity_sentiment': ('document', 'encoding_type', ), - 'analyze_sentiment': ('document', 'encoding_type', ), - 'analyze_syntax': ('document', 'encoding_type', ), - 'annotate_text': ('document', 'features', 'encoding_type', ), - 'classify_text': ('document', 'classification_model_options', ), - 'moderate_text': ('document', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=languageCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the language client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta2/setup.py b/owl-bot-staging/v1beta2/setup.py deleted file mode 100644 index 047e5bce..00000000 --- a/owl-bot-staging/v1beta2/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-language' - - -description = "Google Cloud Language API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/language/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-language" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt b/owl-bot-staging/v1beta2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt b/owl-bot-staging/v1beta2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt b/owl-bot-staging/v1beta2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt b/owl-bot-staging/v1beta2/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adfe..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt b/owl-bot-staging/v1beta2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt b/owl-bot-staging/v1beta2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/v1beta2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta2/tests/__init__.py b/owl-bot-staging/v1beta2/tests/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/__init__.py b/owl-bot-staging/v1beta2/tests/unit/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py deleted file mode 100644 index 231bc125..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py b/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py deleted file mode 100644 index 3e0b7671..00000000 --- a/owl-bot-staging/v1beta2/tests/unit/gapic/language_v1beta2/test_language_service.py +++ /dev/null @@ -1,4070 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.language_v1beta2.services.language_service import LanguageServiceAsyncClient -from google.cloud.language_v1beta2.services.language_service import LanguageServiceClient -from google.cloud.language_v1beta2.services.language_service import transports -from google.cloud.language_v1beta2.types import language_service -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LanguageServiceClient._get_default_mtls_endpoint(None) is None - assert LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - 
-@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LanguageServiceGrpcTransport, "grpc"), - (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (LanguageServiceClient, "grpc"), - (LanguageServiceAsyncClient, "grpc_asyncio"), - (LanguageServiceClient, "rest"), -]) -def test_language_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://language.googleapis.com' - ) - - -def test_language_service_client_get_transport_class(): - transport = LanguageServiceClient.get_transport_class() - available_transports = [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceRestTransport, - ] - assert transport in available_transports - - transport = LanguageServiceClient.get_transport_class("grpc") - assert transport == transports.LanguageServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LanguageServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError):
-            client = client_class(transport=transport_name)
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "true"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", "false"),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "true"),
-    (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", "false"),
-])
-@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient))
-@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_language_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - LanguageServiceClient, LanguageServiceAsyncClient -]) -@mock.patch.object(LanguageServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceClient)) -@mock.patch.object(LanguageServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LanguageServiceAsyncClient)) -def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
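-    # An api_endpoint passed explicitly through client_options takes precedence over the mTLS autoswitch.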
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), -]) -def test_language_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
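-    # The scopes list should reach the transport exactly as supplied, with no merging or reordering.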
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), -]) -def test_language_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_language_service_client_client_options_from_dict(): - with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = LanguageServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc", grpc_helpers), - (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_language_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
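-    # create_channel must receive the file-based credentials (file_creds), not the ADC defaults.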
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "language.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-language',
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="language.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeSentimentRequest,
-    dict,
-])
-def test_analyze_sentiment(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        )
-        response = client.analyze_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSentimentResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_sentiment_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        client.analyze_sentiment()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSentimentRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
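-        # FakeUnaryUnaryCall wraps the response so the mocked stub returns an awaitable, as a real async gRPC call would.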
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSentimentResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_async_from_dict():
-    await test_analyze_sentiment_async(request_type=dict)
-
-
-def test_analyze_sentiment_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSentimentResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_sentiment_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_sentiment(
-            language_service.AnalyzeSentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_sentiment_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSentimentResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_sentiment_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - response = client.analyze_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - client.analyze_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitiesRequest() - -@pytest.mark.asyncio -async def test_analyze_entities_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitiesRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entities), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_entities(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeEntitiesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitiesResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entities_async_from_dict():
-    await test_analyze_entities_async(request_type=dict)
-
-
-def test_analyze_entities_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeEntitiesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_entities(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_entities_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entities(
-            language_service.AnalyzeEntitiesRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_entities_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entities),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitiesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_entities(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].document - mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT) - assert arg == mock_val - arg = args[0].encoding_type - mock_val = language_service.EncodingType.UTF8 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_analyze_entities_flattened_error_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment(request_type, transport: str = 'grpc'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - response = client.analyze_entity_sentiment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - client.analyze_entity_sentiment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == language_service.AnalyzeEntitySentimentRequest() - -@pytest.mark.asyncio -async def test_analyze_entity_sentiment_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeEntitySentimentRequest): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_entity_sentiment), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_entity_sentiment(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeEntitySentimentRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeEntitySentimentResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_async_from_dict():
-    await test_analyze_entity_sentiment_async(request_type=dict)
-
-
-def test_analyze_entity_sentiment_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entity_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeEntitySentimentResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_entity_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_entity_sentiment_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_entity_sentiment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeEntitySentimentResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_entity_sentiment(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_entity_sentiment_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_entity_sentiment(
-            language_service.AnalyzeEntitySentimentRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeSyntaxRequest,
-    dict,
-])
-def test_analyze_syntax(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        )
-        response = client.analyze_syntax(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-def test_analyze_syntax_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        client.analyze_syntax()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async(transport: str = 'grpc_asyncio', request_type=language_service.AnalyzeSyntaxRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse(
-            language='language_value',
-        ))
-        response = await client.analyze_syntax(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnalyzeSyntaxRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_async_from_dict():
-    await test_analyze_syntax_async(request_type=dict)
-
-
-def test_analyze_syntax_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnalyzeSyntaxResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_syntax(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_analyze_syntax_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_syntax),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnalyzeSyntaxResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_syntax(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_syntax_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_syntax(
-            language_service.AnalyzeSyntaxRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.ClassifyTextRequest,
-    dict,
-])
-def test_classify_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ClassifyTextResponse()
-        response = client.classify_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-def test_classify_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        client.classify_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-@pytest.mark.asyncio
-async def test_classify_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ClassifyTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
-        response = await client.classify_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ClassifyTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ClassifyTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_classify_text_async_from_dict():
-    await test_classify_text_async(request_type=dict)
-
-
-def test_classify_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ClassifyTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.classify_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-
-def test_classify_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.classify_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ClassifyTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.classify_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_classify_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.classify_text(
-            language_service.ClassifyTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.ModerateTextRequest,
-    dict,
-])
-def test_moderate_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ModerateTextResponse()
-        response = client.moderate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-def test_moderate_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        client.moderate_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-@pytest.mark.asyncio
-async def test_moderate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.ModerateTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
-        response = await client.moderate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.ModerateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.ModerateTextResponse)
-
-
-@pytest.mark.asyncio
-async def test_moderate_text_async_from_dict():
-    await test_moderate_text_async(request_type=dict)
-
-
-def test_moderate_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.ModerateTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.moderate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-
-def test_moderate_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.moderate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.ModerateTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.moderate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_moderate_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.moderate_text(
-            language_service.ModerateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnnotateTextRequest,
-    dict,
-])
-def test_annotate_text(request_type, transport: str = 'grpc'):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnnotateTextResponse(
-            language='language_value',
-        )
-        response = client.annotate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-def test_annotate_text_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        client.annotate_text()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-@pytest.mark.asyncio
-async def test_annotate_text_async(transport: str = 'grpc_asyncio', request_type=language_service.AnnotateTextRequest):
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse(
-            language='language_value',
-        ))
-        response = await client.annotate_text(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == language_service.AnnotateTextRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, language_service.AnnotateTextResponse)
-    assert response.language == 'language_value'
-
-
-@pytest.mark.asyncio
-async def test_annotate_text_async_from_dict():
-    await test_annotate_text_async(request_type=dict)
-
-
-def test_annotate_text_flattened():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = language_service.AnnotateTextResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.annotate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].features
-        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-
-def test_annotate_text_flattened_error():
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.annotate_text(
-            language_service.AnnotateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.annotate_text),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(language_service.AnnotateTextResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.annotate_text(
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].document
-        mock_val = language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT)
-        assert arg == mock_val
-        arg = args[0].features
-        mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True)
-        assert arg == mock_val
-        arg = args[0].encoding_type
-        mock_val = language_service.EncodingType.UTF8
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_annotate_text_flattened_error_async():
-    client = LanguageServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.annotate_text(
-            language_service.AnnotateTextRequest(),
-            document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT),
-            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
-            encoding_type=language_service.EncodingType.UTF8,
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    language_service.AnalyzeSentimentRequest,
-    dict,
-])
-def test_analyze_sentiment_rest(request_type):
-    client = LanguageServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = language_service.AnalyzeSentimentResponse(
-            language='language_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.analyze_sentiment(request)
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, language_service.AnalyzeSentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_sentiment_rest_required_fields(request_type=language_service.AnalyzeSentimentRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
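# For context: path_template.transcode() applies the google.api.http binding
# (for this RPC, roughly `post: "/v1beta2/documents:analyzeSentiment" body: "*"`)
# to turn a request message into a concrete HTTP method, uri, body, and query
# string. A small illustrative call; the field value is made up, and the
# commented result is the approximate shape the mock below stands in for:

from google.api_core import path_template

http_options = [{'method': 'post', 'uri': '/v1beta2/documents:analyzeSentiment', 'body': '*'}]
transcoded = path_template.transcode(http_options, document={'content': 'Hello'})
# transcoded ~= {'method': 'post', 'uri': '/v1beta2/documents:analyzeSentiment',
#                'body': {'document': {'content': 'Hello'}}, 'query_params': {}}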
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.analyze_sentiment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_sentiment_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_sentiment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_sentiment_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_sentiment") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.AnalyzeSentimentRequest.pb(language_service.AnalyzeSentimentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.AnalyzeSentimentResponse.to_json(language_service.AnalyzeSentimentResponse()) - - request = language_service.AnalyzeSentimentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.AnalyzeSentimentResponse() - - client.analyze_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSentimentRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
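# The mocked 400 below surfaces as core_exceptions.BadRequest because
# google.api_core maps HTTP status codes onto its exception hierarchy. That
# mapping can be exercised without any client; a sketch using the public
# from_http_response helper (the error payload is made up):

from google.api_core import exceptions as core_exceptions
from requests import Request, Response

resp = Response()
resp.status_code = 400
resp.request = Request()
resp._content = b'{"error": {"message": "Document content is empty."}}'

exc = core_exceptions.from_http_response(resp)
assert isinstance(exc, core_exceptions.BadRequest)
assert 'Document content is empty.' in exc.message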
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_sentiment(request) - - -def test_analyze_sentiment_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSentimentResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_sentiment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:analyzeSentiment" % client.transport._host, args[1]) - - -def test_analyze_sentiment_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_sentiment( - language_service.AnalyzeSentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_sentiment_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitiesRequest, - dict, -]) -def test_analyze_entities_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = language_service.AnalyzeEntitiesResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_entities(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitiesResponse) - assert response.language == 'language_value' - - -def test_analyze_entities_rest_required_fields(request_type=language_service.AnalyzeEntitiesRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitiesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.analyze_entities(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_entities_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_entities._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_entities_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entities") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entities") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.AnalyzeEntitiesRequest.pb(language_service.AnalyzeEntitiesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json(language_service.AnalyzeEntitiesResponse()) - - request = language_service.AnalyzeEntitiesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.AnalyzeEntitiesResponse() - - client.analyze_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_entities_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitiesRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
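# The pre/post hooks patched in the interceptor test above are the REST
# transport's public customization surface: a LanguageServiceRestInterceptor
# subclass can rewrite the request and metadata on the way out and the
# response on the way back. A hypothetical logging interceptor using the same
# hook names the test asserts (relies on this module's `transports` import):

class LoggingInterceptor(transports.LanguageServiceRestInterceptor):
    def pre_analyze_entities(self, request, metadata):
        print('sending', type(request).__name__)
        return request, metadata

    def post_analyze_entities(self, response):
        print('received', type(response).__name__)
        return response

# transport = transports.LanguageServiceRestTransport(
#     credentials=ga_credentials.AnonymousCredentials(),
#     interceptor=LoggingInterceptor(),
# )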
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_entities(request) - - -def test_analyze_entities_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitiesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_entities(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:analyzeEntities" % client.transport._host, args[1]) - - -def test_analyze_entities_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entities( - language_service.AnalyzeEntitiesRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_entities_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeEntitySentimentRequest, - dict, -]) -def test_analyze_entity_sentiment_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
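# AnalyzeEntitySentimentResponse here is a proto-plus wrapper; the .pb()
# classmethod used a few lines down unwraps it to the raw protobuf message
# that json_format understands. In isolation (a sketch):

from google.cloud.language_v1beta2.types import language_service
from google.protobuf import json_format

wrapped = language_service.AnalyzeEntitySentimentResponse(language='en')
raw = language_service.AnalyzeEntitySentimentResponse.pb(wrapped)
print(json_format.MessageToJson(raw))  # {"language": "en"} (pretty-printed)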
- return_value = language_service.AnalyzeEntitySentimentResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_entity_sentiment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) - assert response.language == 'language_value' - - -def test_analyze_entity_sentiment_rest_required_fields(request_type=language_service.AnalyzeEntitySentimentRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitySentimentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.analyze_entity_sentiment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_entity_sentiment_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.AnalyzeEntitySentimentRequest.pb(language_service.AnalyzeEntitySentimentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.AnalyzeEntitySentimentResponse.to_json(language_service.AnalyzeEntitySentimentResponse()) - - request = language_service.AnalyzeEntitySentimentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.AnalyzeEntitySentimentResponse() - - client.analyze_entity_sentiment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_entity_sentiment_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeEntitySentimentRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
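# A note on the assertion pattern in the *_rest_unset_required_fields tests
# above: as generated, the right-hand side appears to intersect the tuple of
# required fields that carry default query-parameter values (empty for these
# body-only RPCs) with the tuple of all required fields, so the expected
# result reduces to the empty set:

assert (set(()) & set(("document",))) == set()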
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_entity_sentiment(request) - - -def test_analyze_entity_sentiment_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeEntitySentimentResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_entity_sentiment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:analyzeEntitySentiment" % client.transport._host, args[1]) - - -def test_analyze_entity_sentiment_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_entity_sentiment( - language_service.AnalyzeEntitySentimentRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_entity_sentiment_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnalyzeSyntaxRequest, - dict, -]) -def test_analyze_syntax_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
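# The flattened REST tests above pin the final URL with path_template.validate,
# which checks a concrete path against a template ('*' matches one path
# segment, '**' matches any remainder). A standalone example:

from google.api_core import path_template

assert path_template.validate('users/*/messages/*', 'users/me/messages/123')
assert not path_template.validate('users/*/messages/*', 'users/me/drafts/123')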
- return_value = language_service.AnalyzeSyntaxResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_syntax(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, language_service.AnalyzeSyntaxResponse) - assert response.language == 'language_value' - - -def test_analyze_syntax_rest_required_fields(request_type=language_service.AnalyzeSyntaxRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_syntax._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSyntaxResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.analyze_syntax(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_syntax_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_syntax._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_syntax_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_analyze_syntax") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_analyze_syntax") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.AnalyzeSyntaxRequest.pb(language_service.AnalyzeSyntaxRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json(language_service.AnalyzeSyntaxResponse()) - - request = language_service.AnalyzeSyntaxRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.AnalyzeSyntaxResponse() - - client.analyze_syntax(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_syntax_rest_bad_request(transport: str = 'rest', request_type=language_service.AnalyzeSyntaxRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
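# A note on the Document(type_=...) spelling used throughout these tests:
# proto-plus appends a trailing underscore when a proto field name collides
# with a Python keyword or builtin ('type' here), so the wire field 'type'
# becomes 'type_' in Python. For example:

from google.cloud.language_v1beta2.types import language_service

doc = language_service.Document(
    type_=language_service.Document.Type.PLAIN_TEXT,
    content='Hello, world!',
)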
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_syntax(request) - - -def test_analyze_syntax_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnalyzeSyntaxResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.analyze_syntax(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:analyzeSyntax" % client.transport._host, args[1]) - - -def test_analyze_syntax_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_syntax( - language_service.AnalyzeSyntaxRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_analyze_syntax_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.ClassifyTextRequest, - dict, -]) -def test_classify_text_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ClassifyTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.classify_text(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, language_service.ClassifyTextResponse) - - -def test_classify_text_rest_required_fields(request_type=language_service.ClassifyTextRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).classify_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.ClassifyTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.classify_text(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_classify_text_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.classify_text._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_classify_text_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_classify_text") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_classify_text") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.ClassifyTextRequest.pb(language_service.ClassifyTextRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.ClassifyTextResponse.to_json(language_service.ClassifyTextResponse()) - - request = language_service.ClassifyTextRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.ClassifyTextResponse() - - client.classify_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_classify_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ClassifyTextRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
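# The request_type parametrizations in this file include dict because the
# generated clients accept either the proto-plus request message or a plain
# mapping of the same shape and coerce the latter. Equivalent spellings (a
# sketch):

from google.cloud.language_v1beta2.types import language_service

as_message = language_service.ClassifyTextRequest(
    document=language_service.Document(content='Hello'),
)
as_dict = {'document': {'content': 'Hello'}}
assert language_service.ClassifyTextRequest(as_dict) == as_message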
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.classify_text(request) - - -def test_classify_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ClassifyTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ClassifyTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.classify_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:classifyText" % client.transport._host, args[1]) - - -def test_classify_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.classify_text( - language_service.ClassifyTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - -def test_classify_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.ModerateTextRequest, - dict, -]) -def test_moderate_text_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ModerateTextResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ModerateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.moderate_text(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, language_service.ModerateTextResponse) - - -def test_moderate_text_rest_required_fields(request_type=language_service.ModerateTextRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).moderate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.ModerateTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.ModerateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.moderate_text(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_moderate_text_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.moderate_text._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_moderate_text_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_moderate_text") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_moderate_text") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.ModerateTextRequest.pb(language_service.ModerateTextRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.ModerateTextResponse.to_json(language_service.ModerateTextResponse()) - - request = language_service.ModerateTextRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.ModerateTextResponse() - - client.moderate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_moderate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.ModerateTextRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
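# The expected_params assertion above pins the system query parameter the REST
# transport always sends: $alt=json;enum-encoding=int requests a JSON response
# with enums encoded as integers, matching what the client-side protobuf JSON
# parsing expects. On the wire the pair is percent-encoded:

from urllib.parse import urlencode

print(urlencode([('$alt', 'json;enum-encoding=int')]))
# %24alt=json%3Benum-encoding%3Dint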
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.moderate_text(request) - - -def test_moderate_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.ModerateTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.ModerateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.moderate_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:moderateText" % client.transport._host, args[1]) - - -def test_moderate_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.moderate_text( - language_service.ModerateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - ) - - -def test_moderate_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - language_service.AnnotateTextRequest, - dict, -]) -def test_annotate_text_rest(request_type): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse( - language='language_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.annotate_text(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, language_service.AnnotateTextResponse) - assert response.language == 'language_value' - - -def test_annotate_text_rest_required_fields(request_type=language_service.AnnotateTextRequest): - transport_class = transports.LanguageServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).annotate_text._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.annotate_text(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_annotate_text_rest_unset_required_fields(): - transport = transports.LanguageServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.annotate_text._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("document", "features", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_annotate_text_rest_interceptors(null_interceptor): - transport = transports.LanguageServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LanguageServiceRestInterceptor(), - ) - client = LanguageServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "post_annotate_text") as post, \ - mock.patch.object(transports.LanguageServiceRestInterceptor, "pre_annotate_text") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = language_service.AnnotateTextRequest.pb(language_service.AnnotateTextRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = language_service.AnnotateTextResponse.to_json(language_service.AnnotateTextResponse()) - - request = language_service.AnnotateTextRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = language_service.AnnotateTextResponse() - - client.annotate_text(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_annotate_text_rest_bad_request(transport: str = 'rest', request_type=language_service.AnnotateTextRequest): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.annotate_text(request) - - -def test_annotate_text_rest_flattened(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = language_service.AnnotateTextResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = language_service.AnnotateTextResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.annotate_text(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta2/documents:annotateText" % client.transport._host, args[1]) - - -def test_annotate_text_rest_flattened_error(transport: str = 'rest'): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.annotate_text( - language_service.AnnotateTextRequest(), - document=language_service.Document(type_=language_service.Document.Type.PLAIN_TEXT), - features=language_service.AnnotateTextRequest.Features(extract_syntax=True), - encoding_type=language_service.EncodingType.UTF8, - ) - - -def test_annotate_text_rest_error(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
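# ClientOptions is the configuration object referenced below; it can carry an
# api_key, api_endpoint, credentials_file, scopes, and more, and the client
# constructor rejects combinations that conflict with an explicit transport or
# credentials, as these tests check. A hypothetical api_key setup:

from google.api_core.client_options import ClientOptions

options = ClientOptions(api_key='my-api-key')
# client = LanguageServiceClient(client_options=options)  # no credentials/transport args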
- transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LanguageServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LanguageServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = LanguageServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LanguageServiceGrpcTransport, - ) - -def test_language_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_language_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'analyze_sentiment', - 'analyze_entities', - 'analyze_entity_sentiment', - 'analyze_syntax', - 'classify_text', - 'moderate_text', - 'annotate_text', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_language_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport() - adc.assert_called_once() - - -def test_language_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LanguageServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_language_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-language', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - transports.LanguageServiceRestTransport, - ], -) -def test_language_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LanguageServiceGrpcTransport, grpc_helpers), - (transports.LanguageServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_language_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "language.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-language', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="language.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_language_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LanguageServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_no_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_language_service_host_with_port(transport_name): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='language.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'language.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://language.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_language_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LanguageServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LanguageServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.analyze_sentiment._session - session2 = client2.transport.analyze_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_entities._session - session2 = client2.transport.analyze_entities._session - assert session1 != session2 - session1 = client1.transport.analyze_entity_sentiment._session - session2 = client2.transport.analyze_entity_sentiment._session - assert session1 != session2 - session1 = client1.transport.analyze_syntax._session - session2 = client2.transport.analyze_syntax._session - assert session1 != session2 - session1 = client1.transport.classify_text._session - session2 = client2.transport.classify_text._session - assert session1 != session2 - session1 = client1.transport.moderate_text._session - session2 = client2.transport.moderate_text._session - assert session1 != session2 - session1 = client1.transport.annotate_text._session - session2 = client2.transport.annotate_text._session - assert session1 != session2 -def test_language_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.LanguageServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_language_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LanguageServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport]) -def test_language_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = LanguageServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = LanguageServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = LanguageServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = LanguageServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = LanguageServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = LanguageServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = LanguageServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = LanguageServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = LanguageServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = LanguageServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = LanguageServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LanguageServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.LanguageServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = LanguageServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LanguageServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (LanguageServiceClient, transports.LanguageServiceGrpcTransport),
-    (LanguageServiceAsyncClient, transports.LanguageServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client.DEFAULT_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py b/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py
similarity index 100%
rename from owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py
rename to samples/generated_samples/language_v1_generated_language_service_moderate_text_async.py
diff --git a/owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py b/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py
similarity index 100%
rename from owl-bot-staging/v1/samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py
rename to samples/generated_samples/language_v1_generated_language_service_moderate_text_sync.py
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json
index 936a8b70..4e481f59 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json
@@ -1024,6 +1024,167 @@
                 }
             ],
             "title": "language_v1_generated_language_service_classify_text_sync.py"
+        },
+        {
+            "canonical": true,
+            "clientMethod": {
+                "async": true,
+                "client": {
+                    "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient",
+                    "shortName": "LanguageServiceAsyncClient"
+                },
+                "fullName": "google.cloud.language_v1.LanguageServiceAsyncClient.moderate_text",
+                "method": {
+                    "fullName": "google.cloud.language.v1.LanguageService.ModerateText",
+                    "service": {
+                        "fullName": "google.cloud.language.v1.LanguageService",
+                        "shortName": "LanguageService"
+                    },
+                    "shortName": "ModerateText"
+                },
+                "parameters": [
+                    {
+                        "name": "request",
+                        "type": "google.cloud.language_v1.types.ModerateTextRequest"
+                    },
+                    {
+                        "name": "document",
+                        "type": "google.cloud.language_v1.types.Document"
+                    },
+                    {
+                        "name": "retry",
+                        "type": "google.api_core.retry.Retry"
+                    },
+                    {
+                        "name": "timeout",
+                        "type": "float"
+                    },
+                    {
+                        "name": "metadata",
+                        "type": "Sequence[Tuple[str, str]]"
+                    }
+                ],
+                "resultType": "google.cloud.language_v1.types.ModerateTextResponse",
+                "shortName": "moderate_text"
+            },
+            "description": "Sample for ModerateText",
+            "file": "language_v1_generated_language_service_moderate_text_async.py",
+            "language": "PYTHON",
+            "origin": "API_DEFINITION",
+            "regionTag": "language_v1_generated_LanguageService_ModerateText_async",
+            "segments": [
+                {
+                    "end": 54,
+                    "start": 27,
+                    "type": "FULL"
+                },
+                {
+                    "end": 54,
+                    "start": 27,
+                    "type": "SHORT"
+                },
+                {
+                    "end": 40,
+                    "start": 38,
+                    "type": "CLIENT_INITIALIZATION"
+                },
+                {
+                    "end": 48,
+                    "start": 41,
+                    "type": "REQUEST_INITIALIZATION"
+                },
+                {
+                    "end": 51,
+                    "start": 49,
+                    "type": "REQUEST_EXECUTION"
+                },
+                {
+                    "end": 55,
+                    "start": 52,
+                    "type": "RESPONSE_HANDLING"
+                }
+            ],
+            "title": "language_v1_generated_language_service_moderate_text_async.py"
+        },
+        {
+            "canonical": true,
+            "clientMethod": {
+                "client": {
+                    "fullName": "google.cloud.language_v1.LanguageServiceClient",
+                    "shortName": "LanguageServiceClient"
+                },
+                "fullName": "google.cloud.language_v1.LanguageServiceClient.moderate_text",
+                "method": {
+                    "fullName": "google.cloud.language.v1.LanguageService.ModerateText",
+                    "service": {
+                        "fullName": "google.cloud.language.v1.LanguageService",
+                        "shortName": "LanguageService"
+                    },
+                    "shortName": "ModerateText"
+                },
+                "parameters": [
+                    {
+                        "name": "request",
+                        "type": "google.cloud.language_v1.types.ModerateTextRequest"
+                    },
+                    {
+                        "name": "document",
+                        "type": "google.cloud.language_v1.types.Document"
+                    },
+                    {
+                        "name": "retry",
+                        "type": "google.api_core.retry.Retry"
+                    },
+                    {
+                        "name": "timeout",
+                        "type": "float"
+                    },
+                    {
+                        "name": "metadata",
+                        "type": "Sequence[Tuple[str, str]]"
+                    }
+                ],
+                "resultType": "google.cloud.language_v1.types.ModerateTextResponse",
+                "shortName": "moderate_text"
+            },
+            "description": "Sample for ModerateText",
+            "file": "language_v1_generated_language_service_moderate_text_sync.py",
+            "language": "PYTHON",
+            "origin": "API_DEFINITION",
+            "regionTag": "language_v1_generated_LanguageService_ModerateText_sync",
+            "segments": [
+                {
+                    "end": 54,
+                    "start": 27,
+                    "type": "FULL"
+                },
+                {
+                    "end": 54,
+                    "start": 27,
+                    "type": "SHORT"
+                },
+                {
+                    "end": 40,
+                    "start": 38,
+                    "type": "CLIENT_INITIALIZATION"
+                },
+                {
+                    "end": 48,
+                    "start": 41,
+                    "type": "REQUEST_INITIALIZATION"
+                },
+                {
+                    "end": 51,
+                    "start": 49,
+                    "type": "REQUEST_EXECUTION"
+                },
+                {
+                    "end": 55,
+                    "start": 52,
+                    "type": "RESPONSE_HANDLING"
+                }
+            ],
+            "title": "language_v1_generated_language_service_moderate_text_sync.py"
         }
     ]
 }
diff --git a/scripts/fixup_language_v1_keywords.py b/scripts/fixup_language_v1_keywords.py
index fc15df57..10fa218c 100644
--- a/scripts/fixup_language_v1_keywords.py
+++ b/scripts/fixup_language_v1_keywords.py
@@ -45,6 +45,7 @@ class languageCallTransformer(cst.CSTTransformer):
         'analyze_syntax': ('document', 'encoding_type', ),
         'annotate_text': ('document', 'features', 'encoding_type', ),
         'classify_text': ('document', 'classification_model_options', ),
+        'moderate_text': ('document', ),
     }
 
     def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py
index 4904c456..77957902 100644
--- a/tests/unit/gapic/language_v1/test_language_service.py
+++ b/tests/unit/gapic/language_v1/test_language_service.py
@@ -1700,6 +1700,183 @@ async def test_classify_text_flattened_error_async():
     )
 
 
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        language_service.ModerateTextRequest,
+        dict,
+    ],
+)
+def test_moderate_text(request_type, transport: str = "grpc"):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.moderate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ModerateTextResponse()
+        response = client.moderate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ModerateTextResponse)
+
+
+def test_moderate_text_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.moderate_text), "__call__") as call:
+        client.moderate_text()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+
+@pytest.mark.asyncio
+async def test_moderate_text_async(
+    transport: str = "grpc_asyncio", request_type=language_service.ModerateTextRequest
+):
+    client = LanguageServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.moderate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.ModerateTextResponse()
+        )
+        response = await client.moderate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == language_service.ModerateTextRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.ModerateTextResponse)
+
+
+@pytest.mark.asyncio
+async def test_moderate_text_async_from_dict():
+    await test_moderate_text_async(request_type=dict)
+
+
+def test_moderate_text_flattened():
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.moderate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.ModerateTextResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.moderate_text(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + assert arg == mock_val + + +def test_moderate_text_flattened_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +@pytest.mark.asyncio +async def test_moderate_text_flattened_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.moderate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ModerateTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.ModerateTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.moderate_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_moderate_text_flattened_error_async(): + client = LanguageServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3246,6 +3423,269 @@ def test_classify_text_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + language_service.ModerateTextRequest, + dict, + ], +) +def test_moderate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.moderate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ModerateTextResponse) + + +def test_moderate_text_rest_required_fields( + request_type=language_service.ModerateTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).moderate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = language_service.ModerateTextResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.moderate_text(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_moderate_text_rest_unset_required_fields():
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.moderate_text._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("document",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_moderate_text_rest_interceptors(null_interceptor):
+    transport = transports.LanguageServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.LanguageServiceRestInterceptor(),
+    )
+    client = LanguageServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.LanguageServiceRestInterceptor, "post_moderate_text"
+    ) as post, mock.patch.object(
+        transports.LanguageServiceRestInterceptor, "pre_moderate_text"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = language_service.ModerateTextRequest.pb(
+            language_service.ModerateTextRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = language_service.ModerateTextResponse.to_json(
+            language_service.ModerateTextResponse()
+        )
+
+        request = language_service.ModerateTextRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = language_service.ModerateTextResponse()
+
+        client.moderate_text(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_moderate_text_rest_bad_request(
+    transport: str = "rest", request_type=language_service.ModerateTextRequest
+):
+    client = LanguageServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.moderate_text(request) + + +def test_moderate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ModerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ModerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.moderate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:moderateText" % client.transport._host, args[1] + ) + + +def test_moderate_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.moderate_text( + language_service.ModerateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +def test_moderate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3668,6 +4108,7 @@ def test_language_service_base_transport(): "analyze_entity_sentiment", "analyze_syntax", "classify_text", + "moderate_text", "annotate_text", ) for method in methods: @@ -3954,6 +4395,9 @@ def test_language_service_client_transport_session_collision(transport_name): session1 = client1.transport.classify_text._session session2 = client2.transport.classify_text._session assert session1 != session2 + session1 = client1.transport.moderate_text._session + session2 = client2.transport.moderate_text._session + assert session1 != session2 session1 = client1.transport.annotate_text._session session2 = client2.transport.annotate_text._session assert session1 != session2
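
For readers tracing these tests back to user-facing behavior: the new ModerateText surface can be driven end to end with a short snippet. What follows is a minimal sketch, not part of this patch; it assumes Application Default Credentials are configured, and that ModerateTextResponse exposes its results as moderation_categories, a list of categories carrying a name and a confidence score. The input string and function name are illustrative.

    # Illustrative only; not part of this change.
    from google.cloud import language_v1


    def sample_moderate_text(text: str) -> None:
        # With no explicit credentials, the client resolves them via ADC.
        client = language_v1.LanguageServiceClient()

        # `document` is the only required field of ModerateTextRequest,
        # matching the required-fields test above.
        document = language_v1.Document(
            content=text,
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )

        # The method accepts either a full request object or the flattened
        # `document` keyword, mirroring the flattened-field tests; passing
        # both at once raises ValueError, as the *_flattened_error tests show.
        response = client.moderate_text(document=document)

        # Print each moderation category with its confidence score.
        for category in response.moderation_categories:
            print(f"{category.name}: {category.confidence:.2f}")

The same call is available on LanguageServiceAsyncClient as an awaitable, which is what the grpc_asyncio tests above exercise.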