diff --git a/compute/compute/snippets/README.md b/compute/compute/snippets/README.md
new file mode 100644
index 000000000000..966f4dfe9191
--- /dev/null
+++ b/compute/compute/snippets/README.md
@@ -0,0 +1,31 @@
+# google-cloud-compute library samples
+
+These samples demonstrate usage of the google-cloud-compute library to interact
+with the Google Compute Engine API.
+
+## Running the quickstart script
+
+### Before you begin
+
+1. If you haven't already, set up a Python development environment by following the [Python setup guide](https://cloud.google.com/python/setup) and
+[create a project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#creating_a_project).
+
+1. Run `gcloud auth application-default login` so that the script can authenticate
+to the Google Cloud APIs with your credentials.
+
+### Install requirements
+
+Create a new virtual environment and install the required libraries.
+```bash
+virtualenv --python python3 name-of-your-virtualenv
+source name-of-your-virtualenv/bin/activate
+pip install -r requirements.txt
+```
+
+### Run the demo
+
+Run the quickstart script. It will create and then destroy an `n1-standard-1`
+machine in the `europe-central2-b` zone.
+```bash
+python quickstart.py
+```
diff --git a/compute/compute/snippets/noxfile.py b/compute/compute/snippets/noxfile.py
new file mode 100644
index 000000000000..5ff9e1db5808
--- /dev/null
+++ b/compute/compute/snippets/noxfile.py
@@ -0,0 +1,259 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, List, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+    # You can opt out from the test for specific Python versions.
+    'ignored_versions': ["2.7"],
+
+    # Old samples are opted out of enforcing Python type hints
+    # All new samples should feature them
+    'enforce_type_hints': False,
+
+    # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+    # build specific Cloud project. You can also use your own string
+    # to use your own Cloud project.
+    'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+    # If you need to use a specific version of pip,
+    # change pip_version_override to the string representation
+    # of the version number, for example, "20.2.4"
+    "pip_version_override": None,
+    # A dictionary you want to inject into your test. Don't put any
+    # secrets here. These values will override predefined values.
+    'envs': {},
+}
+
+
+try:
+    # Ensure we can import noxfile_config in the project's directory.
+    sys.path.append('.')
+    from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+    print("No user noxfile_config found: detail: {}".format(e))
+    TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars() -> Dict[str, str]:
+    """Returns a dict for pytest invocation."""
+    ret = {}
+
+    # Override the GCLOUD_PROJECT and the alias.
+    env_key = TEST_CONFIG['gcloud_project_env']
+    # This should error out if not set.
+    ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+
+    # Apply user supplied envs.
+    ret.update(TEST_CONFIG['envs'])
+    return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir: str) -> List[str]:
+    """Determines all import names that should be considered "local".
+
+    This is used when running the linter to ensure that import order is
+    properly checked.
+    """
+    file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+    return [
+        basename
+        for basename, extension in file_ext_pairs
+        if extension == ".py"
+        or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__")
+    ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+#   E203: whitespace before ‘:’
+#   E266: too many leading ‘#’ for block comment
+#   E501: line too long
+#   I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+    "--show-source",
+    "--builtin=gettext",
+    "--max-complexity=20",
+    "--import-order-style=google",
+    "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+    "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+    "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+    if not TEST_CONFIG['enforce_type_hints']:
+        session.install("flake8", "flake8-import-order")
+    else:
+        session.install("flake8", "flake8-import-order", "flake8-annotations")
+
+    local_names = _determine_local_import_names(".")
+    args = FLAKE8_COMMON_ARGS + [
+        "--application-import-names",
+        ",".join(local_names),
+        "."
+    ]
+    session.run("flake8", *args)
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+    session.install("black")
+    python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+    session.run("black", *python_files)
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+    if TEST_CONFIG["pip_version_override"]:
+        pip_version = TEST_CONFIG["pip_version_override"]
+        session.install(f"pip=={pip_version}")
+    """Runs py.test for a particular project."""
+    if os.path.exists("requirements.txt"):
+        if os.path.exists("constraints.txt"):
+            session.install("-r", "requirements.txt", "-c", "constraints.txt")
+        else:
+            session.install("-r", "requirements.txt")
+
+    if os.path.exists("requirements-test.txt"):
+        if os.path.exists("constraints-test.txt"):
+            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+        else:
+            session.install("-r", "requirements-test.txt")
+
+    if INSTALL_LIBRARY_FROM_SOURCE:
+        session.install("-e", _get_repo_root())
+
+    if post_install:
+        post_install(session)
+
+    session.run(
+        "pytest",
+        *(PYTEST_COMMON_ARGS + session.posargs),
+        # Pytest will return 5 when no tests are collected. This can happen
+        # on travis where slow and flaky tests are excluded.
+        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+        success_codes=[0, 5],
+        env=get_pytest_env_vars()
+    )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+    """Runs py.test for a sample using the specified version of Python."""
+    if session.python in TESTED_VERSIONS:
+        _session_tests(session)
+    else:
+        session.skip("SKIPPED: {} tests are disabled for this sample.".format(
+            session.python
+        ))
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+    """ Returns the root folder of the project. """
+    # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+    p = Path(os.getcwd())
+    for i in range(10):
+        if p is None:
+            break
+        if Path(p / ".git").exists():
+            return str(p)
+        # .git is not available in repos cloned via Cloud Build
+        # setup.py is always in the library's root, so use that instead
+        # https://github.com/googleapis/synthtool/issues/792
+        if Path(p / "setup.py").exists():
+            return str(p)
+        p = p.parent
+    raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+    """(Re-)generates the readme for a sample."""
+    session.install("jinja2", "pyyaml")
+    dir_ = os.path.dirname(path)
+
+    if os.path.exists(os.path.join(dir_, "requirements.txt")):
+        session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+    in_file = os.path.join(dir_, "README.rst.in")
+    session.run(
+        "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+    )
diff --git a/compute/compute/snippets/quickstart.py b/compute/compute/snippets/quickstart.py
new file mode 100644
index 000000000000..478a5f707a6d
--- /dev/null
+++ b/compute/compute/snippets/quickstart.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+A sample script showing how to create, list and delete Google Compute Engine
+instances using the google-cloud-compute library. It can be run from the command
+line to create, list and delete an instance in a given project and zone.
+"""
+
+# [START compute_instances_create]
+# [START compute_instances_delete]
+import sys
+
+# [START compute_instances_list]
+# [START compute_instances_list_all]
+# [START compute_instances_operation_check]
+import typing
+
+import google.cloud.compute_v1 as compute_v1
+
+# [END compute_instances_operation_check]
+# [END compute_instances_list_all]
+# [END compute_instances_list]
+# [END compute_instances_delete]
+# [END compute_instances_create]
+
+
+# [START compute_instances_list]
+def list_instances(project_id: str, zone: str) -> typing.Iterable[compute_v1.Instance]:
+    """
+    Gets a list of instances created in the given project and zone.
+    Returns an iterable collection of Instance objects.
+
+    Args:
+        project_id: ID or number of the project you want to use.
+        zone: Name of the zone you want to check, for example: us-west3-b
+
+    Returns:
+        An iterable collection of Instance objects.
+    """
+    instance_client = compute_v1.InstancesClient()
+    instance_list = instance_client.list(project=project_id, zone=zone)
+
+    print(f"Instances found in zone {zone}:")
+    for instance in instance_list:
+        print(f" - {instance.name} ({instance.machine_type})")
+
+    return instance_list
+
+
+# [END compute_instances_list]
+
+# [START compute_instances_list_all]
+def list_all_instances(
+    project_id: str,
+) -> typing.Dict[str, typing.Iterable[compute_v1.Instance]]:
+    """
+    Returns a dictionary of all instances present in a project, grouped by their zone.
+
+    Args:
+        project_id: ID or number of the project you want to use.
+
+    Returns:
+        A dictionary with zone names as keys (in the form of "zones/{zone_name}") and
+        iterable collections of Instance objects as values.
+    """
+    instance_client = compute_v1.InstancesClient()
+    agg_list = instance_client.aggregated_list(project=project_id)
+    all_instances = {}
+    print("Instances found:")
+    for zone, response in agg_list:
+        if response.instances:
+            all_instances[zone] = response.instances
+            print(f" {zone}:")
+            for instance in response.instances:
+                print(f" - {instance.name} ({instance.machine_type})")
+    return all_instances
+
+
+# [END compute_instances_list_all]
+
+
+# [START compute_instances_create]
+def create_instance(
+    project_id: str,
+    zone: str,
+    instance_name: str,
+    machine_type: str = "n1-standard-1",
+    source_image: str = "projects/debian-cloud/global/images/family/debian-10",
+    network_name: str = "global/networks/default",
+) -> compute_v1.Instance:
+    """
+    Sends an instance creation request to GCP and waits for it to complete.
+
+    Args:
+        project_id: ID or number of the project you want to use.
+        zone: Name of the zone you want to use, for example: us-west3-b
+        instance_name: Name of the new machine.
+        machine_type: Machine type you want to create, in the following format:
+            "zones/{zone}/machineTypes/{type_name}". For example:
+            "zones/europe-west3-c/machineTypes/f1-micro"
+            You can find the list of available machine types using:
+            https://cloud.google.com/sdk/gcloud/reference/compute/machine-types/list
+        source_image: Path to the disk image you want to use for your boot
+            disk. This can be one of the public images
+            (e.g. "projects/debian-cloud/global/images/family/debian-10")
+            or a private image you have access to.
+            You can check the list of available public images using:
+            $ gcloud compute images list
+        network_name: Name of the network you want the new instance to use.
+            For example: global/networks/default - if you want to use the
+            default network.
+
+    Returns:
+        Instance object.
+    """
+    instance_client = compute_v1.InstancesClient()
+
+    # Every machine requires at least one persistent disk.
+    disk = compute_v1.AttachedDisk()
+    initialize_params = compute_v1.AttachedDiskInitializeParams()
+    initialize_params.source_image = (
+        source_image  # "projects/debian-cloud/global/images/family/debian-10"
+    )
+    initialize_params.disk_size_gb = "10"
+    disk.initialize_params = initialize_params
+    disk.auto_delete = True
+    disk.boot = True
+    disk.type_ = compute_v1.AttachedDisk.Type.PERSISTENT
+
+    # Every machine needs to be connected to a VPC network.
+    # The 'default' network is created automatically in every project.
+    network_interface = compute_v1.NetworkInterface()
+    network_interface.name = network_name
+
+    # Collecting all the information into the Instance object
+    instance = compute_v1.Instance()
+    instance.name = instance_name
+    instance.disks = [disk]
+    full_machine_type_name = f"zones/{zone}/machineTypes/{machine_type}"
+    instance.machine_type = full_machine_type_name
+    instance.network_interfaces = [network_interface]
+
+    # Preparing the InsertInstanceRequest
+    request = compute_v1.InsertInstanceRequest()
+    request.zone = zone
+    request.project = project_id
+    request.instance_resource = instance
+
+    print(f"Creating the {instance_name} instance in {zone}...")
+    operation = instance_client.insert(request=request)
+    if operation.status == compute_v1.Operation.Status.RUNNING:
+        operation_client = compute_v1.ZoneOperationsClient()
+        operation = operation_client.wait(
+            operation=operation.name, zone=zone, project=project_id
+        )
+    if operation.error:
+        print("Error during creation:", operation.error, file=sys.stderr)
+    if operation.warnings:
+        print("Warning during creation:", operation.warnings, file=sys.stderr)
+    print(f"Instance {instance_name} created.")
+    return instance
+
+
+# [END compute_instances_create]
+
+
+# [START compute_instances_delete]
+def delete_instance(project_id: str, zone: str, machine_name: str) -> None:
+    """
+    Sends a delete request to GCP and waits for it to complete.
+
+    Args:
+        project_id: ID or number of the project you want to use.
+        zone: Name of the zone you want to use, for example: us-west3-b
+        machine_name: Name of the machine you want to delete.
+ """ + instance_client = compute_v1.InstancesClient() + + print(f"Deleting {machine_name} from {zone}...") + operation = instance_client.delete( + project=project_id, zone=zone, instance=machine_name + ) + if operation.status == compute_v1.Operation.Status.RUNNING: + operation_client = compute_v1.ZoneOperationsClient() + operation = operation_client.wait( + operation=operation.name, zone=zone, project=project_id + ) + if operation.error: + print("Error during deletion:", operation.error, file=sys.stderr) + if operation.warnings: + print("Warning during deletion:", operation.warnings, file=sys.stderr) + print(f"Instance {machine_name} deleted.") + return + + +# [END compute_instances_delete] + + +# [START compute_instances_operation_check] +def wait_for_operation( + operation: compute_v1.Operation, project_id: str +) -> compute_v1.Operation: + """ + This method waits for an operation to be completed. Calling this function + will block until the operation is finished. + + Args: + operation: The Operation object representing the operation you want to + wait on. + project_id: ID or number of the project owning the operation. + + Returns: + Finished Operation object. + """ + kwargs = {"project": project_id, "operation": operation.name} + if operation.zone: + client = compute_v1.ZoneOperationsClient() + # Operation.zone is a full URL address of a zone, so we need to extract just the name + kwargs["zone"] = operation.zone.rsplit("/", maxsplit=1)[1] + elif operation.region: + client = compute_v1.RegionOperationsClient() + # Operation.region is a full URL address of a zone, so we need to extract just the name + kwargs["region"] = operation.region.rsplit("/", maxsplit=1)[1] + else: + client = compute_v1.GlobalOperationsClient() + return client.wait(**kwargs) + + +# [END compute_instances_operation_check] + + +def main(project_id: str, zone: str, instance_name: str) -> None: + + create_instance(project_id, zone, instance_name) + + zone_instances = list_instances(project_id, zone) + print(f"Instances found in {zone}:", ", ".join(i.name for i in zone_instances)) + + all_instances = list_all_instances(project_id) + print(f"Instances found in project {project_id}:") + for i_zone, instances in all_instances.items(): + print(f"{i_zone}:", ", ".join(i.name for i in instances)) + + delete_instance(project_id, zone, instance_name) + + +if __name__ == "__main__": + import uuid + import google.auth + import google.auth.exceptions + + try: + default_project_id = google.auth.default()[1] + except google.auth.exceptions.DefaultCredentialsError: + print( + "Please use `gcloud auth application-default login` " + "or set GOOGLE_APPLICATION_CREDENTIALS to use this script." 
+        )
+    else:
+        instance_name = "quickstart-" + uuid.uuid4().hex[:10]
+        instance_zone = "europe-central2-b"
+        main(default_project_id, instance_zone, instance_name)
diff --git a/compute/compute/snippets/requirements-test.txt b/compute/compute/snippets/requirements-test.txt
new file mode 100644
index 000000000000..11b890faecfe
--- /dev/null
+++ b/compute/compute/snippets/requirements-test.txt
@@ -0,0 +1 @@
+pytest==6.2.4
\ No newline at end of file
diff --git a/compute/compute/snippets/requirements.txt b/compute/compute/snippets/requirements.txt
new file mode 100644
index 000000000000..8a2294e1e0ad
--- /dev/null
+++ b/compute/compute/snippets/requirements.txt
@@ -0,0 +1 @@
+google-cloud-compute==0.3.0
\ No newline at end of file
diff --git a/compute/compute/snippets/test_quickstart.py b/compute/compute/snippets/test_quickstart.py
new file mode 100644
index 000000000000..91a2d3642ab2
--- /dev/null
+++ b/compute/compute/snippets/test_quickstart.py
@@ -0,0 +1,36 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import typing
+import uuid
+
+import google.auth
+
+from samples.snippets.quickstart import main
+
+PROJECT = google.auth.default()[1]
+INSTANCE_NAME = "i" + uuid.uuid4().hex[:10]
+INSTANCE_ZONE = "europe-central2-b"
+
+
+def test_main(capsys: typing.Any) -> None:
+    main(PROJECT, INSTANCE_ZONE, INSTANCE_NAME)
+
+    out, _ = capsys.readouterr()
+
+    assert f"Instance {INSTANCE_NAME} created." in out
+    assert re.search(f"Instances found in {INSTANCE_ZONE}:.+{INSTANCE_NAME}", out)
+    assert re.search(f"zones/{INSTANCE_ZONE}:.+{INSTANCE_NAME}", out)
+    assert f"Instance {INSTANCE_NAME} deleted." in out
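
Beyond running `python quickstart.py`, the individual snippet functions can also be reused from other scripts. Below is a minimal sketch, not part of the diff above, assuming `quickstart.py` sits in the working directory (the test imports it as `samples.snippets.quickstart` instead), Application Default Credentials are configured, and the `f1-micro` machine type and `demo-` name prefix are illustrative choices rather than values taken from the sample.

```python
# Hypothetical driver script (illustration only); assumes quickstart.py is in
# the current directory and `gcloud auth application-default login` has been run.
import uuid

import google.auth

from quickstart import create_instance, delete_instance, list_instances

project_id = google.auth.default()[1]  # project resolved from ADC
zone = "europe-central2-b"  # same zone the quickstart uses
instance_name = "demo-" + uuid.uuid4().hex[:10]  # unique, disposable name

create_instance(project_id, zone, instance_name, machine_type="f1-micro")
try:
    # list_instances prints the instances it finds and returns the collection.
    for instance in list_instances(project_id, zone):
        print(instance.name, instance.machine_type)
finally:
    # Clean up the instance even if listing fails.
    delete_instance(project_id, zone, instance_name)
```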