From 51e1478ef98713c15aea956dd82bbc779af361f5 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 6 Dec 2023 16:48:01 -0500
Subject: [PATCH] feat: [google-cloud-compute] Update Compute Engine API to revision 20231110 (#868) (#12092)

- [ ] Regenerate this pull request now.

Source-Link: https://github.com/googleapis/googleapis/commit/05186cd29d147d9adbc1701b5ddb655c46f486f4
Source-Link: https://github.com/googleapis/googleapis-gen/commit/dc566d3f71567bd40ea3f896b3e0cdaa73e9499d
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWNvbXB1dGUvLk93bEJvdC55YW1sIiwiaCI6ImRjNTY2ZDNmNzE1NjdiZDQwZWEzZjg5NmIzZTBjZGFhNzNlOTQ5OWQifQ==
---------
Co-authored-by: Owl Bot
Co-authored-by: Anthonios Partheniou
---
 .../docs/compute_v1/services_.rst | 1 +
 .../compute_v1/snapshot_settings_service.rst | 6 +
 .../google/cloud/compute/__init__.py | 120 +
 .../google/cloud/compute_v1/__init__.py | 118 +
 .../cloud/compute_v1/gapic_metadata.json | 144 +
 .../services/addresses/transports/rest.py | 8 +-
 .../services/autoscalers/transports/rest.py | 8 +-
 .../services/backend_buckets/client.py | 422 +
 .../backend_buckets/transports/base.py | 44 +
 .../backend_buckets/transports/rest.py | 456 +-
 .../services/backend_services/client.py | 303 +-
 .../services/backend_services/pagers.py | 62 +
 .../backend_services/transports/base.py | 33 +
 .../backend_services/transports/rest.py | 342 +-
 .../cloud/compute_v1/services/disks/client.py | 56 +-
 .../services/disks/transports/rest.py | 80 +-
 .../external_vpn_gateways/transports/rest.py | 6 +-
 .../services/firewall_policies/client.py | 56 +-
 .../firewall_policies/transports/rest.py | 76 +-
 .../services/firewalls/transports/rest.py | 8 +-
 .../forwarding_rules/transports/rest.py | 10 +-
 .../global_addresses/transports/rest.py | 8 +-
 .../transports/rest.py | 10 +-
 .../global_network_endpoint_groups/client.py | 9 +-
 .../transports/rest.py | 17 +-
 .../services/global_operations/client.py | 4 +-
 .../global_operations/transports/rest.py | 4 +-
 .../global_organization_operations/client.py | 2 +-
 .../transports/rest.py | 2 +-
 .../transports/rest.py | 6 +-
 .../services/health_checks/client.py | 41 +-
 .../services/health_checks/transports/rest.py | 50 +-
 .../compute_v1/services/images/client.py | 56 +-
 .../services/images/transports/rest.py | 66 +-
 .../instance_group_managers/client.py | 8 +-
 .../transports/rest.py | 32 +-
 .../instance_groups/transports/rest.py | 10 +-
 .../services/instance_templates/client.py | 74 +-
 .../instance_templates/transports/rest.py | 78 +-
 .../compute_v1/services/instances/client.py | 373 +-
 .../services/instances/transports/base.py | 14 +
 .../services/instances/transports/rest.py | 273 +-
 .../transports/rest.py | 8 +-
 .../services/interconnects/client.py | 127 +-
 .../services/interconnects/transports/base.py | 17 +
 .../services/interconnects/transports/rest.py | 140 +-
 .../compute_v1/services/licenses/client.py | 56 +-
 .../services/licenses/transports/rest.py | 60 +-
 .../services/machine_images/client.py | 56 +-
 .../machine_images/transports/rest.py | 60 +-
 .../services/network_attachments/client.py | 361 +-
 .../network_attachments/transports/base.py | 14 +
 .../network_attachments/transports/rest.py | 207 +-
 .../transports/rest.py | 6 +-
 .../network_endpoint_groups/client.py | 9 +-
 .../transports/rest.py | 17 +-
 .../network_firewall_policies/client.py | 56 +-
 .../transports/rest.py | 74 +-
 .../services/networks/transports/rest.py | 14 +-
 .../compute_v1/services/node_groups/client.py | 56 +-
.../services/node_groups/transports/rest.py | 70 +- .../services/node_templates/client.py | 56 +- .../node_templates/transports/rest.py | 60 +- .../packet_mirrorings/transports/rest.py | 6 +- .../services/projects/transports/rest.py | 18 +- .../public_advertised_prefixes/client.py | 524 + .../transports/base.py | 28 + .../transports/rest.py | 286 +- .../public_delegated_prefixes/client.py | 582 ++ .../transports/base.py | 28 + .../transports/rest.py | 286 +- .../region_autoscalers/transports/rest.py | 8 +- .../region_backend_services/client.py | 661 +- .../region_backend_services/pagers.py | 62 + .../transports/base.py | 47 + .../transports/rest.py | 496 +- .../region_commitments/transports/rest.py | 4 +- .../services/region_disks/client.py | 56 +- .../services/region_disks/transports/rest.py | 80 +- .../transports/rest.py | 6 +- .../services/region_health_checks/client.py | 41 +- .../region_health_checks/transports/rest.py | 50 +- .../transports/rest.py | 28 +- .../region_instance_groups/transports/rest.py | 2 +- .../region_instance_templates/client.py | 18 +- .../transports/rest.py | 22 +- .../region_instances/transports/rest.py | 2 +- .../region_network_endpoint_groups/client.py | 919 +- .../region_network_endpoint_groups/pagers.py | 64 + .../transports/base.py | 45 + .../transports/rest.py | 472 +- .../client.py | 56 +- .../transports/rest.py | 74 +- .../transports/rest.py | 4 +- .../services/region_operations/client.py | 4 +- .../region_operations/transports/rest.py | 4 +- .../region_security_policies/client.py | 1444 ++- .../transports/base.py | 56 + .../transports/rest.py | 571 +- .../region_ssl_certificates/client.py | 30 +- .../transports/rest.py | 34 +- .../region_ssl_policies/transports/rest.py | 6 +- .../region_target_http_proxies/client.py | 19 +- .../transports/rest.py | 25 +- .../region_target_https_proxies/client.py | 16 +- .../transports/rest.py | 26 +- .../transports/rest.py | 4 +- .../services/region_url_maps/client.py | 30 +- .../region_url_maps/transports/rest.py | 38 +- .../services/reservations/client.py | 56 +- .../services/reservations/transports/rest.py | 64 +- .../services/resource_policies/client.py | 56 +- .../resource_policies/transports/rest.py | 62 +- .../compute_v1/services/routers/client.py | 126 + .../services/routers/transports/base.py | 14 + .../services/routers/transports/rest.py | 133 +- .../services/routes/transports/rest.py | 4 +- .../security_policies/transports/rest.py | 14 +- .../services/service_attachments/client.py | 56 +- .../service_attachments/transports/rest.py | 62 +- .../snapshot_settings_service/__init__.py | 18 + .../snapshot_settings_service/client.py | 785 ++ .../transports/__init__.py | 35 + .../transports/base.py | 186 + .../transports/rest.py | 454 + .../compute_v1/services/snapshots/client.py | 56 +- .../services/snapshots/transports/rest.py | 62 +- .../services/ssl_certificates/client.py | 30 +- .../ssl_certificates/transports/rest.py | 34 +- .../services/ssl_policies/transports/rest.py | 6 +- .../compute_v1/services/subnetworks/client.py | 56 +- .../services/subnetworks/transports/rest.py | 66 +- .../target_grpc_proxies/transports/rest.py | 6 +- .../services/target_http_proxies/client.py | 19 +- .../target_http_proxies/transports/rest.py | 27 +- .../services/target_https_proxies/client.py | 16 +- .../target_https_proxies/transports/rest.py | 32 +- .../services/target_instances/client.py | 321 + .../target_instances/transports/base.py | 14 + .../target_instances/transports/rest.py | 157 +- .../services/target_pools/client.py | 
321 + .../services/target_pools/transports/base.py | 14 + .../services/target_pools/transports/rest.py | 165 +- .../target_ssl_proxies/transports/rest.py | 14 +- .../target_tcp_proxies/transports/rest.py | 8 +- .../target_vpn_gateways/transports/rest.py | 6 +- .../compute_v1/services/url_maps/client.py | 30 +- .../services/url_maps/transports/rest.py | 40 +- .../services/vpn_gateways/transports/rest.py | 6 +- .../services/vpn_tunnels/transports/rest.py | 6 +- .../services/zone_operations/client.py | 4 +- .../zone_operations/transports/rest.py | 4 +- .../google/cloud/compute_v1/types/__init__.py | 116 + .../google/cloud/compute_v1/types/compute.py | 9148 ++++++++++++----- ...ted_backend_buckets_get_iam_policy_sync.py | 53 + ...ted_backend_buckets_set_iam_policy_sync.py | 53 + ...ckend_buckets_test_iam_permissions_sync.py | 53 + ...rated_backend_services_list_usable_sync.py | 53 + ...kend_services_test_iam_permissions_sync.py | 53 + ...ated_instances_set_security_policy_sync.py | 54 + ...ed_interconnects_get_macsec_config_sync.py | 53 + ...enerated_network_attachments_patch_sync.py | 54 + ...ublic_advertised_prefixes_announce_sync.py | 53 + ...ublic_advertised_prefixes_withdraw_sync.py | 53 + ...public_delegated_prefixes_announce_sync.py | 54 + ...public_delegated_prefixes_withdraw_sync.py | 54 + ...egion_backend_services_list_usable_sync.py | 54 + ...ckend_services_set_security_policy_sync.py | 54 + ...kend_services_test_iam_permissions_sync.py | 54 + ...nt_groups_attach_network_endpoints_sync.py | 54 + ...nt_groups_detach_network_endpoints_sync.py | 54 + ...oint_groups_list_network_endpoints_sync.py | 55 + ..._region_security_policies_add_rule_sync.py | 54 + ..._region_security_policies_get_rule_sync.py | 54 + ...egion_security_policies_patch_rule_sync.py | 54 + ...gion_security_policies_remove_rule_sync.py | 54 + ..._generated_routers_get_nat_ip_info_sync.py | 54 + ...ated_snapshot_settings_service_get_sync.py | 52 + ...ed_snapshot_settings_service_patch_sync.py | 52 + ...rget_instances_set_security_policy_sync.py | 54 + ...d_target_pools_set_security_policy_sync.py | 54 + ...ppet_metadata_google.cloud.compute.v1.json | 6136 +++++++---- .../scripts/fixup_compute_v1_keywords.py | 8 +- .../tests/system/__init__.py | 15 + .../compute_v1/test_accelerator_types.py | 2 + .../unit/gapic/compute_v1/test_addresses.py | 2 + .../unit/gapic/compute_v1/test_autoscalers.py | 2 + .../gapic/compute_v1/test_backend_buckets.py | 1161 ++- .../gapic/compute_v1/test_backend_services.py | 2343 +++-- .../unit/gapic/compute_v1/test_disk_types.py | 2 + .../tests/unit/gapic/compute_v1/test_disks.py | 6 + .../gapic/compute_v1/test_forwarding_rules.py | 2 + .../compute_v1/test_global_operations.py | 2 + .../test_global_public_delegated_prefixes.py | 22 +- .../gapic/compute_v1/test_health_checks.py | 2 + .../test_instance_group_managers.py | 60 +- .../gapic/compute_v1/test_instance_groups.py | 2 + .../compute_v1/test_instance_templates.py | 4 + .../unit/gapic/compute_v1/test_instances.py | 1258 ++- .../test_interconnect_attachments.py | 2 + .../compute_v1/test_interconnect_locations.py | 4 + .../gapic/compute_v1/test_interconnects.py | 357 + .../gapic/compute_v1/test_machine_images.py | 2 + .../gapic/compute_v1/test_machine_types.py | 2 + .../compute_v1/test_network_attachments.py | 928 ++ .../test_network_edge_security_services.py | 2 + .../test_network_endpoint_groups.py | 2 + .../unit/gapic/compute_v1/test_node_groups.py | 2 + .../gapic/compute_v1/test_node_templates.py | 2 + 
.../unit/gapic/compute_v1/test_node_types.py | 2 + .../compute_v1/test_packet_mirrorings.py | 2 + .../test_public_advertised_prefixes.py | 2964 ++++-- .../test_public_delegated_prefixes.py | 3024 ++++-- .../test_region_backend_services.py | 2715 ++++- .../compute_v1/test_region_commitments.py | 2 + .../gapic/compute_v1/test_region_disks.py | 4 + .../test_region_instance_group_managers.py | 58 +- .../test_region_instance_templates.py | 2 + .../gapic/compute_v1/test_region_instances.py | 2 + .../test_region_network_endpoint_groups.py | 2985 +++++- .../test_region_security_policies.py | 5254 ++++++++-- .../gapic/compute_v1/test_reservations.py | 2 + .../compute_v1/test_resource_policies.py | 2 + .../unit/gapic/compute_v1/test_routers.py | 362 + .../compute_v1/test_security_policies.py | 254 +- .../compute_v1/test_service_attachments.py | 2 + .../test_snapshot_settings_service.py | 2130 ++++ .../unit/gapic/compute_v1/test_snapshots.py | 9 + .../gapic/compute_v1/test_ssl_certificates.py | 2 + .../gapic/compute_v1/test_ssl_policies.py | 2 + .../unit/gapic/compute_v1/test_subnetworks.py | 2 + .../compute_v1/test_target_http_proxies.py | 2 + .../compute_v1/test_target_https_proxies.py | 2 + .../gapic/compute_v1/test_target_instances.py | 852 ++ .../gapic/compute_v1/test_target_pools.py | 836 ++ .../compute_v1/test_target_tcp_proxies.py | 2 + .../compute_v1/test_target_vpn_gateways.py | 2 + .../unit/gapic/compute_v1/test_url_maps.py | 2 + .../gapic/compute_v1/test_vpn_gateways.py | 2 + .../unit/gapic/compute_v1/test_vpn_tunnels.py | 2 + 240 files changed, 48611 insertions(+), 11862 deletions(-) create mode 100644 packages/google-cloud-compute/docs/compute_v1/snapshot_settings_service.rst create mode 100644 packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/__init__.py create mode 100644 packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py create mode 100644 packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/__init__.py create mode 100644 packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/base.py create mode 100644 packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_get_iam_policy_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_set_iam_policy_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_test_iam_permissions_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_list_usable_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_test_iam_permissions_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instances_set_security_policy_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_interconnects_get_macsec_config_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_attachments_patch_sync.py create mode 100644 
packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_announce_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_withdraw_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_announce_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_withdraw_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_list_usable_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_set_security_policy_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_test_iam_permissions_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_add_rule_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_get_rule_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_patch_rule_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_remove_rule_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_routers_get_nat_ip_info_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_get_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_patch_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_instances_set_security_policy_sync.py create mode 100644 packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_pools_set_security_policy_sync.py create mode 100644 packages/google-cloud-compute/tests/system/__init__.py create mode 100644 packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py diff --git a/packages/google-cloud-compute/docs/compute_v1/services_.rst b/packages/google-cloud-compute/docs/compute_v1/services_.rst index 92385f57114c..0aa09bac57fc 100644 --- a/packages/google-cloud-compute/docs/compute_v1/services_.rst +++ b/packages/google-cloud-compute/docs/compute_v1/services_.rst @@ -77,6 +77,7 @@ Services for Google Cloud Compute v1 API security_policies service_attachments snapshots + snapshot_settings_service ssl_certificates ssl_policies subnetworks diff --git a/packages/google-cloud-compute/docs/compute_v1/snapshot_settings_service.rst b/packages/google-cloud-compute/docs/compute_v1/snapshot_settings_service.rst new file mode 100644 index 
000000000000..24492169147a --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/snapshot_settings_service.rst @@ -0,0 +1,6 @@ +SnapshotSettingsService +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.snapshot_settings_service + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index 86c395be0e51..b0138e51e33e 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -185,6 +185,9 @@ from google.cloud.compute_v1.services.service_attachments.client import ( ServiceAttachmentsClient, ) +from google.cloud.compute_v1.services.snapshot_settings_service.client import ( + SnapshotSettingsServiceClient, +) from google.cloud.compute_v1.services.snapshots.client import SnapshotsClient from google.cloud.compute_v1.services.ssl_certificates.client import ( SslCertificatesClient, @@ -247,6 +250,7 @@ AddRuleFirewallPolicyRequest, AddRuleNetworkFirewallPolicyRequest, AddRuleRegionNetworkFirewallPolicyRequest, + AddRuleRegionSecurityPolicyRequest, AddRuleSecurityPolicyRequest, AddSignedUrlKeyBackendBucketRequest, AddSignedUrlKeyBackendServiceRequest, @@ -299,6 +303,8 @@ AllocationSpecificSKUAllocationReservedInstanceProperties, AllocationSpecificSKUReservation, Allowed, + AnnouncePublicAdvertisedPrefixeRequest, + AnnouncePublicDelegatedPrefixeRequest, ApplyUpdatesToInstancesInstanceGroupManagerRequest, ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, AttachDiskInstanceRequest, @@ -306,6 +312,7 @@ AttachedDiskInitializeParams, AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, AttachNetworkEndpointsNetworkEndpointGroupRequest, + AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, AuthorizationLoggingOptions, @@ -320,6 +327,7 @@ AutoscalingPolicyLoadBalancingUtilization, AutoscalingPolicyScaleInControl, AutoscalingPolicyScalingSchedule, + AWSV4Signature, Backend, BackendBucket, BackendBucketCdnPolicy, @@ -337,12 +345,14 @@ BackendServiceGroupHealth, BackendServiceIAP, BackendServiceList, + BackendServiceListUsable, BackendServiceLocalityLoadBalancingPolicyConfig, BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy, BackendServiceLocalityLoadBalancingPolicyConfigPolicy, BackendServiceLogConfig, BackendServiceReference, BackendServicesScopedList, + BackendServiceUsedBy, BfdPacket, BfdStatus, BfdStatusPacketCounts, @@ -352,6 +362,7 @@ BulkInsertInstanceRequest, BulkInsertInstanceResource, BulkInsertInstanceResourcePerInstanceProperties, + BulkInsertOperationStatus, BulkInsertRegionDiskRequest, BulkInsertRegionInstanceRequest, CacheInvalidationRule, @@ -470,6 +481,7 @@ DetachDiskInstanceRequest, DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, DetachNetworkEndpointsNetworkEndpointGroupRequest, + DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, DisableXpnHostProjectRequest, DisableXpnResourceProjectRequest, Disk, @@ -558,6 +570,7 @@ GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, GetHealthTargetPoolRequest, + GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, GetIamPolicyDiskRequest, GetIamPolicyFirewallPolicyRequest, @@ -592,6 +605,8 @@ GetLicenseRequest, GetMachineImageRequest, GetMachineTypeRequest, + GetMacsecConfigInterconnectRequest, + GetNatIpInfoRouterRequest, GetNatMappingInfoRoutersRequest, GetNetworkAttachmentRequest, 
GetNetworkEdgeSecurityServiceRequest, @@ -635,6 +650,7 @@ GetRuleFirewallPolicyRequest, GetRuleNetworkFirewallPolicyRequest, GetRuleRegionNetworkFirewallPolicyRequest, + GetRuleRegionSecurityPolicyRequest, GetRuleSecurityPolicyRequest, GetScreenshotInstanceRequest, GetSecurityPolicyRequest, @@ -642,6 +658,7 @@ GetServiceAttachmentRequest, GetShieldedInstanceIdentityInstanceRequest, GetSnapshotRequest, + GetSnapshotSettingRequest, GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, @@ -826,6 +843,7 @@ InstanceProperties, InstanceReference, InstancesAddResourcePoliciesRequest, + InstancesBulkInsertOperationMetadata, InstancesGetEffectiveFirewallsResponse, InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -835,6 +853,7 @@ InstancesSetMachineTypeRequest, InstancesSetMinCpuPlatformRequest, InstancesSetNameRequest, + InstancesSetSecurityPolicyRequest, InstancesSetServiceAccountRequest, InstancesStartWithEncryptionKeyRequest, InstanceTemplate, @@ -858,10 +877,15 @@ InterconnectDiagnosticsLinkLACPStatus, InterconnectDiagnosticsLinkOpticalPower, InterconnectDiagnosticsLinkStatus, + InterconnectDiagnosticsMacsecStatus, InterconnectList, InterconnectLocation, InterconnectLocationList, InterconnectLocationRegionInfo, + InterconnectMacsec, + InterconnectMacsecConfig, + InterconnectMacsecConfigPreSharedKey, + InterconnectMacsecPreSharedKey, InterconnectOutageNotification, InterconnectRemoteLocation, InterconnectRemoteLocationConstraints, @@ -869,6 +893,7 @@ InterconnectRemoteLocationList, InterconnectRemoteLocationPermittedConnections, InterconnectsGetDiagnosticsResponse, + InterconnectsGetMacsecConfigResponse, InvalidateCacheUrlMapRequest, Items, License, @@ -920,6 +945,7 @@ ListNetworkEndpointGroupsRequest, ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, ListNetworkEndpointsNetworkEndpointGroupsRequest, + ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ListNetworkFirewallPoliciesRequest, ListNetworksRequest, ListNodeGroupsRequest, @@ -975,6 +1001,8 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendServicesRequest, + ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, ListVpnTunnelsRequest, @@ -1010,6 +1038,9 @@ MoveGlobalAddressRequest, MoveInstanceProjectRequest, NamedPort, + NatIpInfo, + NatIpInfoNatIpInfoMapping, + NatIpInfoResponse, Network, NetworkAttachment, NetworkAttachmentAggregatedList, @@ -1096,6 +1127,7 @@ PatchInstanceGroupManagerRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, + PatchNetworkAttachmentRequest, PatchNetworkEdgeSecurityServiceRequest, PatchNetworkFirewallPolicyRequest, PatchNetworkRequest, @@ -1120,9 +1152,11 @@ PatchRuleFirewallPolicyRequest, PatchRuleNetworkFirewallPolicyRequest, PatchRuleRegionNetworkFirewallPolicyRequest, + PatchRuleRegionSecurityPolicyRequest, PatchRuleSecurityPolicyRequest, PatchSecurityPolicyRequest, PatchServiceAttachmentRequest, + PatchSnapshotSettingRequest, PatchSslPolicyRequest, PatchSubnetworkRequest, PatchTargetGrpcProxyRequest, @@ -1136,6 +1170,8 @@ PreconfiguredWafSet, PreservedState, PreservedStatePreservedDisk, + PreservedStatePreservedNetworkIp, + PreservedStatePreservedNetworkIpIpAddress, PreviewRouterRequest, Project, ProjectsDisableXpnResourceRequest, @@ -1184,6 +1220,8 @@ RegionInstanceGroupsListInstancesRequest, RegionInstanceGroupsSetNamedPortsRequest, RegionList, + RegionNetworkEndpointGroupsAttachEndpointsRequest, + 
RegionNetworkEndpointGroupsDetachEndpointsRequest, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, @@ -1203,6 +1241,7 @@ RemoveRuleFirewallPolicyRequest, RemoveRuleNetworkFirewallPolicyRequest, RemoveRuleRegionNetworkFirewallPolicyRequest, + RemoveRuleRegionSecurityPolicyRequest, RemoveRuleSecurityPolicyRequest, RequestMirrorPolicy, Reservation, @@ -1279,6 +1318,7 @@ SecurityPolicy, SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1290,6 +1330,8 @@ SecurityPolicyRuleHttpHeaderActionHttpHeaderOption, SecurityPolicyRuleMatcher, SecurityPolicyRuleMatcherConfig, + SecurityPolicyRuleNetworkMatcher, + SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch, SecurityPolicyRulePreconfiguredWafConfig, SecurityPolicyRulePreconfiguredWafConfigExclusion, SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams, @@ -1297,6 +1339,7 @@ SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig, SecurityPolicyRuleRateLimitOptionsThreshold, SecurityPolicyRuleRedirectOptions, + SecurityPolicyUserDefinedField, SecuritySettings, SendDiagnosticInterruptInstanceRequest, SendDiagnosticInterruptInstanceResponse, @@ -1314,12 +1357,15 @@ SetBackupTargetPoolRequest, SetCertificateMapTargetHttpsProxyRequest, SetCertificateMapTargetSslProxyRequest, + SetCommonInstanceMetadataOperationMetadata, + SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo, SetCommonInstanceMetadataProjectRequest, SetDefaultNetworkTierProjectRequest, SetDeletionProtectionInstanceRequest, SetDiskAutoDeleteInstanceRequest, SetEdgeSecurityPolicyBackendBucketRequest, SetEdgeSecurityPolicyBackendServiceRequest, + SetIamPolicyBackendBucketRequest, SetIamPolicyBackendServiceRequest, SetIamPolicyDiskRequest, SetIamPolicyFirewallPolicyRequest, @@ -1372,6 +1418,10 @@ SetQuicOverrideTargetHttpsProxyRequest, SetSchedulingInstanceRequest, SetSecurityPolicyBackendServiceRequest, + SetSecurityPolicyInstanceRequest, + SetSecurityPolicyRegionBackendServiceRequest, + SetSecurityPolicyTargetInstanceRequest, + SetSecurityPolicyTargetPoolRequest, SetServiceAccountInstanceRequest, SetShieldedInstanceIntegrityPolicyInstanceRequest, SetSslCertificatesRegionTargetHttpsProxyRequest, @@ -1400,6 +1450,9 @@ SimulateMaintenanceEventNodeGroupRequest, Snapshot, SnapshotList, + SnapshotSettings, + SnapshotSettingsStorageLocationSettings, + SnapshotSettingsStorageLocationSettingsStorageLocationPreference, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -1423,6 +1476,8 @@ StatefulPolicy, StatefulPolicyPreservedState, StatefulPolicyPreservedStateDiskDevice, + StatefulPolicyPreservedStateNetworkIp, + Status, StopAsyncReplicationDiskRequest, StopAsyncReplicationRegionDiskRequest, StopGroupAsyncReplicationDiskRequest, @@ -1485,6 +1540,8 @@ TargetVpnGatewaysScopedList, TCPHealthCheck, TestFailure, + TestIamPermissionsBackendBucketRequest, + TestIamPermissionsBackendServiceRequest, TestIamPermissionsDiskRequest, TestIamPermissionsExternalVpnGatewayRequest, TestIamPermissionsFirewallPolicyRequest, @@ -1500,6 +1557,7 @@ TestIamPermissionsNodeGroupRequest, TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, + 
TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionDiskRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsReservationRequest, @@ -1511,6 +1569,7 @@ TestPermissionsRequest, TestPermissionsResponse, Uint128, + UpcomingMaintenance, UpdateAccessConfigInstanceRequest, UpdateAutoscalerRequest, UpdateBackendBucketRequest, @@ -1577,6 +1636,8 @@ Warning, Warnings, WeightedBackendService, + WithdrawPublicAdvertisedPrefixeRequest, + WithdrawPublicDelegatedPrefixeRequest, XpnHostList, XpnResourceId, Zone, @@ -1660,6 +1721,7 @@ "SecurityPoliciesClient", "ServiceAttachmentsClient", "SnapshotsClient", + "SnapshotSettingsServiceClient", "SslCertificatesClient", "SslPoliciesClient", "SubnetworksClient", @@ -1704,6 +1766,7 @@ "AddRuleFirewallPolicyRequest", "AddRuleNetworkFirewallPolicyRequest", "AddRuleRegionNetworkFirewallPolicyRequest", + "AddRuleRegionSecurityPolicyRequest", "AddRuleSecurityPolicyRequest", "AddSignedUrlKeyBackendBucketRequest", "AddSignedUrlKeyBackendServiceRequest", @@ -1756,6 +1819,8 @@ "AllocationSpecificSKUAllocationReservedInstanceProperties", "AllocationSpecificSKUReservation", "Allowed", + "AnnouncePublicAdvertisedPrefixeRequest", + "AnnouncePublicDelegatedPrefixeRequest", "ApplyUpdatesToInstancesInstanceGroupManagerRequest", "ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest", "AttachDiskInstanceRequest", @@ -1763,6 +1828,7 @@ "AttachedDiskInitializeParams", "AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "AttachNetworkEndpointsNetworkEndpointGroupRequest", + "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AuditConfig", "AuditLogConfig", "AuthorizationLoggingOptions", @@ -1777,6 +1843,7 @@ "AutoscalingPolicyLoadBalancingUtilization", "AutoscalingPolicyScaleInControl", "AutoscalingPolicyScalingSchedule", + "AWSV4Signature", "Backend", "BackendBucket", "BackendBucketCdnPolicy", @@ -1794,12 +1861,14 @@ "BackendServiceGroupHealth", "BackendServiceIAP", "BackendServiceList", + "BackendServiceListUsable", "BackendServiceLocalityLoadBalancingPolicyConfig", "BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy", "BackendServiceLocalityLoadBalancingPolicyConfigPolicy", "BackendServiceLogConfig", "BackendServiceReference", "BackendServicesScopedList", + "BackendServiceUsedBy", "BfdPacket", "BfdStatus", "BfdStatusPacketCounts", @@ -1809,6 +1878,7 @@ "BulkInsertInstanceRequest", "BulkInsertInstanceResource", "BulkInsertInstanceResourcePerInstanceProperties", + "BulkInsertOperationStatus", "BulkInsertRegionDiskRequest", "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", @@ -1927,6 +1997,7 @@ "DetachDiskInstanceRequest", "DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "DetachNetworkEndpointsNetworkEndpointGroupRequest", + "DetachNetworkEndpointsRegionNetworkEndpointGroupRequest", "DisableXpnHostProjectRequest", "DisableXpnResourceProjectRequest", "Disk", @@ -2015,6 +2086,7 @@ "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", "GetHealthTargetPoolRequest", + "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", "GetIamPolicyDiskRequest", "GetIamPolicyFirewallPolicyRequest", @@ -2049,6 +2121,8 @@ "GetLicenseRequest", "GetMachineImageRequest", "GetMachineTypeRequest", + "GetMacsecConfigInterconnectRequest", + "GetNatIpInfoRouterRequest", "GetNatMappingInfoRoutersRequest", "GetNetworkAttachmentRequest", "GetNetworkEdgeSecurityServiceRequest", @@ -2092,6 +2166,7 @@ "GetRuleFirewallPolicyRequest", "GetRuleNetworkFirewallPolicyRequest", 
"GetRuleRegionNetworkFirewallPolicyRequest", + "GetRuleRegionSecurityPolicyRequest", "GetRuleSecurityPolicyRequest", "GetScreenshotInstanceRequest", "GetSecurityPolicyRequest", @@ -2099,6 +2174,7 @@ "GetServiceAttachmentRequest", "GetShieldedInstanceIdentityInstanceRequest", "GetSnapshotRequest", + "GetSnapshotSettingRequest", "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", @@ -2283,6 +2359,7 @@ "InstanceProperties", "InstanceReference", "InstancesAddResourcePoliciesRequest", + "InstancesBulkInsertOperationMetadata", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "InstancesRemoveResourcePoliciesRequest", @@ -2292,6 +2369,7 @@ "InstancesSetMachineTypeRequest", "InstancesSetMinCpuPlatformRequest", "InstancesSetNameRequest", + "InstancesSetSecurityPolicyRequest", "InstancesSetServiceAccountRequest", "InstancesStartWithEncryptionKeyRequest", "InstanceTemplate", @@ -2315,10 +2393,15 @@ "InterconnectDiagnosticsLinkLACPStatus", "InterconnectDiagnosticsLinkOpticalPower", "InterconnectDiagnosticsLinkStatus", + "InterconnectDiagnosticsMacsecStatus", "InterconnectList", "InterconnectLocation", "InterconnectLocationList", "InterconnectLocationRegionInfo", + "InterconnectMacsec", + "InterconnectMacsecConfig", + "InterconnectMacsecConfigPreSharedKey", + "InterconnectMacsecPreSharedKey", "InterconnectOutageNotification", "InterconnectRemoteLocation", "InterconnectRemoteLocationConstraints", @@ -2326,6 +2409,7 @@ "InterconnectRemoteLocationList", "InterconnectRemoteLocationPermittedConnections", "InterconnectsGetDiagnosticsResponse", + "InterconnectsGetMacsecConfigResponse", "InvalidateCacheUrlMapRequest", "Items", "License", @@ -2377,6 +2461,7 @@ "ListNetworkEndpointGroupsRequest", "ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest", "ListNetworkEndpointsNetworkEndpointGroupsRequest", + "ListNetworkEndpointsRegionNetworkEndpointGroupsRequest", "ListNetworkFirewallPoliciesRequest", "ListNetworksRequest", "ListNodeGroupsRequest", @@ -2432,6 +2517,8 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendServicesRequest", + "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", "ListVpnTunnelsRequest", @@ -2467,6 +2554,9 @@ "MoveGlobalAddressRequest", "MoveInstanceProjectRequest", "NamedPort", + "NatIpInfo", + "NatIpInfoNatIpInfoMapping", + "NatIpInfoResponse", "Network", "NetworkAttachment", "NetworkAttachmentAggregatedList", @@ -2553,6 +2643,7 @@ "PatchInstanceGroupManagerRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", + "PatchNetworkAttachmentRequest", "PatchNetworkEdgeSecurityServiceRequest", "PatchNetworkFirewallPolicyRequest", "PatchNetworkRequest", @@ -2577,9 +2668,11 @@ "PatchRuleFirewallPolicyRequest", "PatchRuleNetworkFirewallPolicyRequest", "PatchRuleRegionNetworkFirewallPolicyRequest", + "PatchRuleRegionSecurityPolicyRequest", "PatchRuleSecurityPolicyRequest", "PatchSecurityPolicyRequest", "PatchServiceAttachmentRequest", + "PatchSnapshotSettingRequest", "PatchSslPolicyRequest", "PatchSubnetworkRequest", "PatchTargetGrpcProxyRequest", @@ -2593,6 +2686,8 @@ "PreconfiguredWafSet", "PreservedState", "PreservedStatePreservedDisk", + "PreservedStatePreservedNetworkIp", + "PreservedStatePreservedNetworkIpIpAddress", "PreviewRouterRequest", "Project", "ProjectsDisableXpnResourceRequest", @@ -2641,6 +2736,8 @@ "RegionInstanceGroupsListInstancesRequest", 
"RegionInstanceGroupsSetNamedPortsRequest", "RegionList", + "RegionNetworkEndpointGroupsAttachEndpointsRequest", + "RegionNetworkEndpointGroupsDetachEndpointsRequest", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", @@ -2660,6 +2757,7 @@ "RemoveRuleFirewallPolicyRequest", "RemoveRuleNetworkFirewallPolicyRequest", "RemoveRuleRegionNetworkFirewallPolicyRequest", + "RemoveRuleRegionSecurityPolicyRequest", "RemoveRuleSecurityPolicyRequest", "RequestMirrorPolicy", "Reservation", @@ -2736,6 +2834,7 @@ "SecurityPolicy", "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", @@ -2747,6 +2846,8 @@ "SecurityPolicyRuleHttpHeaderActionHttpHeaderOption", "SecurityPolicyRuleMatcher", "SecurityPolicyRuleMatcherConfig", + "SecurityPolicyRuleNetworkMatcher", + "SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch", "SecurityPolicyRulePreconfiguredWafConfig", "SecurityPolicyRulePreconfiguredWafConfigExclusion", "SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams", @@ -2754,6 +2855,7 @@ "SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig", "SecurityPolicyRuleRateLimitOptionsThreshold", "SecurityPolicyRuleRedirectOptions", + "SecurityPolicyUserDefinedField", "SecuritySettings", "SendDiagnosticInterruptInstanceRequest", "SendDiagnosticInterruptInstanceResponse", @@ -2771,12 +2873,15 @@ "SetBackupTargetPoolRequest", "SetCertificateMapTargetHttpsProxyRequest", "SetCertificateMapTargetSslProxyRequest", + "SetCommonInstanceMetadataOperationMetadata", + "SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo", "SetCommonInstanceMetadataProjectRequest", "SetDefaultNetworkTierProjectRequest", "SetDeletionProtectionInstanceRequest", "SetDiskAutoDeleteInstanceRequest", "SetEdgeSecurityPolicyBackendBucketRequest", "SetEdgeSecurityPolicyBackendServiceRequest", + "SetIamPolicyBackendBucketRequest", "SetIamPolicyBackendServiceRequest", "SetIamPolicyDiskRequest", "SetIamPolicyFirewallPolicyRequest", @@ -2829,6 +2934,10 @@ "SetQuicOverrideTargetHttpsProxyRequest", "SetSchedulingInstanceRequest", "SetSecurityPolicyBackendServiceRequest", + "SetSecurityPolicyInstanceRequest", + "SetSecurityPolicyRegionBackendServiceRequest", + "SetSecurityPolicyTargetInstanceRequest", + "SetSecurityPolicyTargetPoolRequest", "SetServiceAccountInstanceRequest", "SetShieldedInstanceIntegrityPolicyInstanceRequest", "SetSslCertificatesRegionTargetHttpsProxyRequest", @@ -2857,6 +2966,9 @@ "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", "SnapshotList", + "SnapshotSettings", + "SnapshotSettingsStorageLocationSettings", + "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -2880,6 +2992,8 @@ "StatefulPolicy", "StatefulPolicyPreservedState", "StatefulPolicyPreservedStateDiskDevice", + "StatefulPolicyPreservedStateNetworkIp", + "Status", "StopAsyncReplicationDiskRequest", "StopAsyncReplicationRegionDiskRequest", "StopGroupAsyncReplicationDiskRequest", @@ -2942,6 +3056,8 @@ "TargetVpnGatewaysScopedList", "TCPHealthCheck", "TestFailure", + "TestIamPermissionsBackendBucketRequest", + "TestIamPermissionsBackendServiceRequest", 
"TestIamPermissionsDiskRequest", "TestIamPermissionsExternalVpnGatewayRequest", "TestIamPermissionsFirewallPolicyRequest", @@ -2957,6 +3073,7 @@ "TestIamPermissionsNodeGroupRequest", "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", + "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionDiskRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsReservationRequest", @@ -2968,6 +3085,7 @@ "TestPermissionsRequest", "TestPermissionsResponse", "Uint128", + "UpcomingMaintenance", "UpdateAccessConfigInstanceRequest", "UpdateAutoscalerRequest", "UpdateBackendBucketRequest", @@ -3034,6 +3152,8 @@ "Warning", "Warnings", "WeightedBackendService", + "WithdrawPublicAdvertisedPrefixeRequest", + "WithdrawPublicDelegatedPrefixeRequest", "XpnHostList", "XpnResourceId", "Zone", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index 55da57749aae..d257cb9410fc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -95,6 +95,7 @@ from .services.routes import RoutesClient from .services.security_policies import SecurityPoliciesClient from .services.service_attachments import ServiceAttachmentsClient +from .services.snapshot_settings_service import SnapshotSettingsServiceClient from .services.snapshots import SnapshotsClient from .services.ssl_certificates import SslCertificatesClient from .services.ssl_policies import SslPoliciesClient @@ -141,6 +142,7 @@ AddRuleFirewallPolicyRequest, AddRuleNetworkFirewallPolicyRequest, AddRuleRegionNetworkFirewallPolicyRequest, + AddRuleRegionSecurityPolicyRequest, AddRuleSecurityPolicyRequest, AddSignedUrlKeyBackendBucketRequest, AddSignedUrlKeyBackendServiceRequest, @@ -193,6 +195,8 @@ AllocationSpecificSKUAllocationReservedInstanceProperties, AllocationSpecificSKUReservation, Allowed, + AnnouncePublicAdvertisedPrefixeRequest, + AnnouncePublicDelegatedPrefixeRequest, ApplyUpdatesToInstancesInstanceGroupManagerRequest, ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, AttachDiskInstanceRequest, @@ -200,6 +204,7 @@ AttachedDiskInitializeParams, AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, AttachNetworkEndpointsNetworkEndpointGroupRequest, + AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, AuthorizationLoggingOptions, @@ -214,6 +219,7 @@ AutoscalingPolicyLoadBalancingUtilization, AutoscalingPolicyScaleInControl, AutoscalingPolicyScalingSchedule, + AWSV4Signature, Backend, BackendBucket, BackendBucketCdnPolicy, @@ -231,12 +237,14 @@ BackendServiceGroupHealth, BackendServiceIAP, BackendServiceList, + BackendServiceListUsable, BackendServiceLocalityLoadBalancingPolicyConfig, BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy, BackendServiceLocalityLoadBalancingPolicyConfigPolicy, BackendServiceLogConfig, BackendServiceReference, BackendServicesScopedList, + BackendServiceUsedBy, BfdPacket, BfdStatus, BfdStatusPacketCounts, @@ -246,6 +254,7 @@ BulkInsertInstanceRequest, BulkInsertInstanceResource, BulkInsertInstanceResourcePerInstanceProperties, + BulkInsertOperationStatus, BulkInsertRegionDiskRequest, BulkInsertRegionInstanceRequest, CacheInvalidationRule, @@ -364,6 +373,7 @@ DetachDiskInstanceRequest, DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, DetachNetworkEndpointsNetworkEndpointGroupRequest, + 
DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, DisableXpnHostProjectRequest, DisableXpnResourceProjectRequest, Disk, @@ -452,6 +462,7 @@ GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, GetHealthTargetPoolRequest, + GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, GetIamPolicyDiskRequest, GetIamPolicyFirewallPolicyRequest, @@ -486,6 +497,8 @@ GetLicenseRequest, GetMachineImageRequest, GetMachineTypeRequest, + GetMacsecConfigInterconnectRequest, + GetNatIpInfoRouterRequest, GetNatMappingInfoRoutersRequest, GetNetworkAttachmentRequest, GetNetworkEdgeSecurityServiceRequest, @@ -529,6 +542,7 @@ GetRuleFirewallPolicyRequest, GetRuleNetworkFirewallPolicyRequest, GetRuleRegionNetworkFirewallPolicyRequest, + GetRuleRegionSecurityPolicyRequest, GetRuleSecurityPolicyRequest, GetScreenshotInstanceRequest, GetSecurityPolicyRequest, @@ -536,6 +550,7 @@ GetServiceAttachmentRequest, GetShieldedInstanceIdentityInstanceRequest, GetSnapshotRequest, + GetSnapshotSettingRequest, GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, @@ -720,6 +735,7 @@ InstanceProperties, InstanceReference, InstancesAddResourcePoliciesRequest, + InstancesBulkInsertOperationMetadata, InstancesGetEffectiveFirewallsResponse, InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -729,6 +745,7 @@ InstancesSetMachineTypeRequest, InstancesSetMinCpuPlatformRequest, InstancesSetNameRequest, + InstancesSetSecurityPolicyRequest, InstancesSetServiceAccountRequest, InstancesStartWithEncryptionKeyRequest, InstanceTemplate, @@ -752,10 +769,15 @@ InterconnectDiagnosticsLinkLACPStatus, InterconnectDiagnosticsLinkOpticalPower, InterconnectDiagnosticsLinkStatus, + InterconnectDiagnosticsMacsecStatus, InterconnectList, InterconnectLocation, InterconnectLocationList, InterconnectLocationRegionInfo, + InterconnectMacsec, + InterconnectMacsecConfig, + InterconnectMacsecConfigPreSharedKey, + InterconnectMacsecPreSharedKey, InterconnectOutageNotification, InterconnectRemoteLocation, InterconnectRemoteLocationConstraints, @@ -763,6 +785,7 @@ InterconnectRemoteLocationList, InterconnectRemoteLocationPermittedConnections, InterconnectsGetDiagnosticsResponse, + InterconnectsGetMacsecConfigResponse, InvalidateCacheUrlMapRequest, Items, License, @@ -814,6 +837,7 @@ ListNetworkEndpointGroupsRequest, ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, ListNetworkEndpointsNetworkEndpointGroupsRequest, + ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ListNetworkFirewallPoliciesRequest, ListNetworksRequest, ListNodeGroupsRequest, @@ -869,6 +893,8 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendServicesRequest, + ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, ListVpnTunnelsRequest, @@ -904,6 +930,9 @@ MoveGlobalAddressRequest, MoveInstanceProjectRequest, NamedPort, + NatIpInfo, + NatIpInfoNatIpInfoMapping, + NatIpInfoResponse, Network, NetworkAttachment, NetworkAttachmentAggregatedList, @@ -990,6 +1019,7 @@ PatchInstanceGroupManagerRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, + PatchNetworkAttachmentRequest, PatchNetworkEdgeSecurityServiceRequest, PatchNetworkFirewallPolicyRequest, PatchNetworkRequest, @@ -1014,9 +1044,11 @@ PatchRuleFirewallPolicyRequest, PatchRuleNetworkFirewallPolicyRequest, PatchRuleRegionNetworkFirewallPolicyRequest, + PatchRuleRegionSecurityPolicyRequest, PatchRuleSecurityPolicyRequest, 
PatchSecurityPolicyRequest, PatchServiceAttachmentRequest, + PatchSnapshotSettingRequest, PatchSslPolicyRequest, PatchSubnetworkRequest, PatchTargetGrpcProxyRequest, @@ -1030,6 +1062,8 @@ PreconfiguredWafSet, PreservedState, PreservedStatePreservedDisk, + PreservedStatePreservedNetworkIp, + PreservedStatePreservedNetworkIpIpAddress, PreviewRouterRequest, Project, ProjectsDisableXpnResourceRequest, @@ -1078,6 +1112,8 @@ RegionInstanceGroupsListInstancesRequest, RegionInstanceGroupsSetNamedPortsRequest, RegionList, + RegionNetworkEndpointGroupsAttachEndpointsRequest, + RegionNetworkEndpointGroupsDetachEndpointsRequest, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, @@ -1097,6 +1133,7 @@ RemoveRuleFirewallPolicyRequest, RemoveRuleNetworkFirewallPolicyRequest, RemoveRuleRegionNetworkFirewallPolicyRequest, + RemoveRuleRegionSecurityPolicyRequest, RemoveRuleSecurityPolicyRequest, RequestMirrorPolicy, Reservation, @@ -1173,6 +1210,7 @@ SecurityPolicy, SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1184,6 +1222,8 @@ SecurityPolicyRuleHttpHeaderActionHttpHeaderOption, SecurityPolicyRuleMatcher, SecurityPolicyRuleMatcherConfig, + SecurityPolicyRuleNetworkMatcher, + SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch, SecurityPolicyRulePreconfiguredWafConfig, SecurityPolicyRulePreconfiguredWafConfigExclusion, SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams, @@ -1191,6 +1231,7 @@ SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig, SecurityPolicyRuleRateLimitOptionsThreshold, SecurityPolicyRuleRedirectOptions, + SecurityPolicyUserDefinedField, SecuritySettings, SendDiagnosticInterruptInstanceRequest, SendDiagnosticInterruptInstanceResponse, @@ -1208,12 +1249,15 @@ SetBackupTargetPoolRequest, SetCertificateMapTargetHttpsProxyRequest, SetCertificateMapTargetSslProxyRequest, + SetCommonInstanceMetadataOperationMetadata, + SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo, SetCommonInstanceMetadataProjectRequest, SetDefaultNetworkTierProjectRequest, SetDeletionProtectionInstanceRequest, SetDiskAutoDeleteInstanceRequest, SetEdgeSecurityPolicyBackendBucketRequest, SetEdgeSecurityPolicyBackendServiceRequest, + SetIamPolicyBackendBucketRequest, SetIamPolicyBackendServiceRequest, SetIamPolicyDiskRequest, SetIamPolicyFirewallPolicyRequest, @@ -1266,6 +1310,10 @@ SetQuicOverrideTargetHttpsProxyRequest, SetSchedulingInstanceRequest, SetSecurityPolicyBackendServiceRequest, + SetSecurityPolicyInstanceRequest, + SetSecurityPolicyRegionBackendServiceRequest, + SetSecurityPolicyTargetInstanceRequest, + SetSecurityPolicyTargetPoolRequest, SetServiceAccountInstanceRequest, SetShieldedInstanceIntegrityPolicyInstanceRequest, SetSslCertificatesRegionTargetHttpsProxyRequest, @@ -1294,6 +1342,9 @@ SimulateMaintenanceEventNodeGroupRequest, Snapshot, SnapshotList, + SnapshotSettings, + SnapshotSettingsStorageLocationSettings, + SnapshotSettingsStorageLocationSettingsStorageLocationPreference, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -1317,6 +1368,8 @@ StatefulPolicy, StatefulPolicyPreservedState, StatefulPolicyPreservedStateDiskDevice, + StatefulPolicyPreservedStateNetworkIp, + 
Status, StopAsyncReplicationDiskRequest, StopAsyncReplicationRegionDiskRequest, StopGroupAsyncReplicationDiskRequest, @@ -1379,6 +1432,8 @@ TargetVpnGatewaysScopedList, TCPHealthCheck, TestFailure, + TestIamPermissionsBackendBucketRequest, + TestIamPermissionsBackendServiceRequest, TestIamPermissionsDiskRequest, TestIamPermissionsExternalVpnGatewayRequest, TestIamPermissionsFirewallPolicyRequest, @@ -1394,6 +1449,7 @@ TestIamPermissionsNodeGroupRequest, TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, + TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionDiskRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsReservationRequest, @@ -1405,6 +1461,7 @@ TestPermissionsRequest, TestPermissionsResponse, Uint128, + UpcomingMaintenance, UpdateAccessConfigInstanceRequest, UpdateAutoscalerRequest, UpdateBackendBucketRequest, @@ -1471,6 +1528,8 @@ Warning, Warnings, WeightedBackendService, + WithdrawPublicAdvertisedPrefixeRequest, + WithdrawPublicDelegatedPrefixeRequest, XpnHostList, XpnResourceId, Zone, @@ -1480,6 +1539,7 @@ ) __all__ = ( + "AWSV4Signature", "AbandonInstancesInstanceGroupManagerRequest", "AbandonInstancesRegionInstanceGroupManagerRequest", "AcceleratorConfig", @@ -1505,6 +1565,7 @@ "AddRuleFirewallPolicyRequest", "AddRuleNetworkFirewallPolicyRequest", "AddRuleRegionNetworkFirewallPolicyRequest", + "AddRuleRegionSecurityPolicyRequest", "AddRuleSecurityPolicyRequest", "AddSignedUrlKeyBackendBucketRequest", "AddSignedUrlKeyBackendServiceRequest", @@ -1562,11 +1623,14 @@ "AllocationSpecificSKUAllocationReservedInstanceProperties", "AllocationSpecificSKUReservation", "Allowed", + "AnnouncePublicAdvertisedPrefixeRequest", + "AnnouncePublicDelegatedPrefixeRequest", "ApplyUpdatesToInstancesInstanceGroupManagerRequest", "ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest", "AttachDiskInstanceRequest", "AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "AttachNetworkEndpointsNetworkEndpointGroupRequest", + "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AttachedDisk", "AttachedDiskInitializeParams", "AuditConfig", @@ -1602,11 +1666,13 @@ "BackendServiceGroupHealth", "BackendServiceIAP", "BackendServiceList", + "BackendServiceListUsable", "BackendServiceLocalityLoadBalancingPolicyConfig", "BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy", "BackendServiceLocalityLoadBalancingPolicyConfigPolicy", "BackendServiceLogConfig", "BackendServiceReference", + "BackendServiceUsedBy", "BackendServicesClient", "BackendServicesScopedList", "BfdPacket", @@ -1618,6 +1684,7 @@ "BulkInsertInstanceRequest", "BulkInsertInstanceResource", "BulkInsertInstanceResourcePerInstanceProperties", + "BulkInsertOperationStatus", "BulkInsertRegionDiskRequest", "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", @@ -1736,6 +1803,7 @@ "DetachDiskInstanceRequest", "DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "DetachNetworkEndpointsNetworkEndpointGroupRequest", + "DetachNetworkEndpointsRegionNetworkEndpointGroupRequest", "DisableXpnHostProjectRequest", "DisableXpnResourceProjectRequest", "Disk", @@ -1831,6 +1899,7 @@ "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", "GetHealthTargetPoolRequest", + "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", "GetIamPolicyDiskRequest", "GetIamPolicyFirewallPolicyRequest", @@ -1865,6 +1934,8 @@ "GetLicenseRequest", "GetMachineImageRequest", "GetMachineTypeRequest", + "GetMacsecConfigInterconnectRequest", + 
"GetNatIpInfoRouterRequest", "GetNatMappingInfoRoutersRequest", "GetNetworkAttachmentRequest", "GetNetworkEdgeSecurityServiceRequest", @@ -1908,6 +1979,7 @@ "GetRuleFirewallPolicyRequest", "GetRuleNetworkFirewallPolicyRequest", "GetRuleRegionNetworkFirewallPolicyRequest", + "GetRuleRegionSecurityPolicyRequest", "GetRuleSecurityPolicyRequest", "GetScreenshotInstanceRequest", "GetSecurityPolicyRequest", @@ -1915,6 +1987,7 @@ "GetServiceAttachmentRequest", "GetShieldedInstanceIdentityInstanceRequest", "GetSnapshotRequest", + "GetSnapshotSettingRequest", "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", @@ -2115,6 +2188,7 @@ "InstanceTemplatesScopedList", "InstanceWithNamedPorts", "InstancesAddResourcePoliciesRequest", + "InstancesBulkInsertOperationMetadata", "InstancesClient", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", @@ -2125,6 +2199,7 @@ "InstancesSetMachineTypeRequest", "InstancesSetMinCpuPlatformRequest", "InstancesSetNameRequest", + "InstancesSetSecurityPolicyRequest", "InstancesSetServiceAccountRequest", "InstancesStartWithEncryptionKeyRequest", "Int64RangeMatch", @@ -2144,11 +2219,16 @@ "InterconnectDiagnosticsLinkLACPStatus", "InterconnectDiagnosticsLinkOpticalPower", "InterconnectDiagnosticsLinkStatus", + "InterconnectDiagnosticsMacsecStatus", "InterconnectList", "InterconnectLocation", "InterconnectLocationList", "InterconnectLocationRegionInfo", "InterconnectLocationsClient", + "InterconnectMacsec", + "InterconnectMacsecConfig", + "InterconnectMacsecConfigPreSharedKey", + "InterconnectMacsecPreSharedKey", "InterconnectOutageNotification", "InterconnectRemoteLocation", "InterconnectRemoteLocationConstraints", @@ -2158,6 +2238,7 @@ "InterconnectRemoteLocationsClient", "InterconnectsClient", "InterconnectsGetDiagnosticsResponse", + "InterconnectsGetMacsecConfigResponse", "InvalidateCacheUrlMapRequest", "Items", "License", @@ -2211,6 +2292,7 @@ "ListNetworkEndpointGroupsRequest", "ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest", "ListNetworkEndpointsNetworkEndpointGroupsRequest", + "ListNetworkEndpointsRegionNetworkEndpointGroupsRequest", "ListNetworkFirewallPoliciesRequest", "ListNetworksRequest", "ListNodeGroupsRequest", @@ -2266,6 +2348,8 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendServicesRequest", + "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", "ListVpnTunnelsRequest", @@ -2303,6 +2387,9 @@ "MoveGlobalAddressRequest", "MoveInstanceProjectRequest", "NamedPort", + "NatIpInfo", + "NatIpInfoNatIpInfoMapping", + "NatIpInfoResponse", "Network", "NetworkAttachment", "NetworkAttachmentAggregatedList", @@ -2398,6 +2485,7 @@ "PatchInstanceGroupManagerRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", + "PatchNetworkAttachmentRequest", "PatchNetworkEdgeSecurityServiceRequest", "PatchNetworkFirewallPolicyRequest", "PatchNetworkRequest", @@ -2422,9 +2510,11 @@ "PatchRuleFirewallPolicyRequest", "PatchRuleNetworkFirewallPolicyRequest", "PatchRuleRegionNetworkFirewallPolicyRequest", + "PatchRuleRegionSecurityPolicyRequest", "PatchRuleSecurityPolicyRequest", "PatchSecurityPolicyRequest", "PatchServiceAttachmentRequest", + "PatchSnapshotSettingRequest", "PatchSslPolicyRequest", "PatchSubnetworkRequest", "PatchTargetGrpcProxyRequest", @@ -2438,6 +2528,8 @@ "PreconfiguredWafSet", "PreservedState", "PreservedStatePreservedDisk", + 
"PreservedStatePreservedNetworkIp", + "PreservedStatePreservedNetworkIpIpAddress", "PreviewRouterRequest", "Project", "ProjectsClient", @@ -2500,7 +2592,9 @@ "RegionInstanceTemplatesClient", "RegionInstancesClient", "RegionList", + "RegionNetworkEndpointGroupsAttachEndpointsRequest", "RegionNetworkEndpointGroupsClient", + "RegionNetworkEndpointGroupsDetachEndpointsRequest", "RegionNetworkFirewallPoliciesClient", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", @@ -2531,6 +2625,7 @@ "RemoveRuleFirewallPolicyRequest", "RemoveRuleNetworkFirewallPolicyRequest", "RemoveRuleRegionNetworkFirewallPolicyRequest", + "RemoveRuleRegionSecurityPolicyRequest", "RemoveRuleSecurityPolicyRequest", "RequestMirrorPolicy", "Reservation", @@ -2613,6 +2708,7 @@ "SecurityPolicy", "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", @@ -2624,6 +2720,8 @@ "SecurityPolicyRuleHttpHeaderActionHttpHeaderOption", "SecurityPolicyRuleMatcher", "SecurityPolicyRuleMatcherConfig", + "SecurityPolicyRuleNetworkMatcher", + "SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch", "SecurityPolicyRulePreconfiguredWafConfig", "SecurityPolicyRulePreconfiguredWafConfigExclusion", "SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams", @@ -2631,6 +2729,7 @@ "SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig", "SecurityPolicyRuleRateLimitOptionsThreshold", "SecurityPolicyRuleRedirectOptions", + "SecurityPolicyUserDefinedField", "SecuritySettings", "SendDiagnosticInterruptInstanceRequest", "SendDiagnosticInterruptInstanceResponse", @@ -2649,12 +2748,15 @@ "SetBackupTargetPoolRequest", "SetCertificateMapTargetHttpsProxyRequest", "SetCertificateMapTargetSslProxyRequest", + "SetCommonInstanceMetadataOperationMetadata", + "SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo", "SetCommonInstanceMetadataProjectRequest", "SetDefaultNetworkTierProjectRequest", "SetDeletionProtectionInstanceRequest", "SetDiskAutoDeleteInstanceRequest", "SetEdgeSecurityPolicyBackendBucketRequest", "SetEdgeSecurityPolicyBackendServiceRequest", + "SetIamPolicyBackendBucketRequest", "SetIamPolicyBackendServiceRequest", "SetIamPolicyDiskRequest", "SetIamPolicyFirewallPolicyRequest", @@ -2707,6 +2809,10 @@ "SetQuicOverrideTargetHttpsProxyRequest", "SetSchedulingInstanceRequest", "SetSecurityPolicyBackendServiceRequest", + "SetSecurityPolicyInstanceRequest", + "SetSecurityPolicyRegionBackendServiceRequest", + "SetSecurityPolicyTargetInstanceRequest", + "SetSecurityPolicyTargetPoolRequest", "SetServiceAccountInstanceRequest", "SetShieldedInstanceIntegrityPolicyInstanceRequest", "SetSslCertificatesRegionTargetHttpsProxyRequest", @@ -2735,6 +2841,10 @@ "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", "SnapshotList", + "SnapshotSettings", + "SnapshotSettingsServiceClient", + "SnapshotSettingsStorageLocationSettings", + "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", "SnapshotsClient", "SourceDiskEncryptionKey", "SourceInstanceParams", @@ -2760,6 +2870,8 @@ "StatefulPolicy", "StatefulPolicyPreservedState", "StatefulPolicyPreservedStateDiskDevice", + "StatefulPolicyPreservedStateNetworkIp", + "Status", "StopAsyncReplicationDiskRequest", 
"StopAsyncReplicationRegionDiskRequest", "StopGroupAsyncReplicationDiskRequest", @@ -2831,6 +2943,8 @@ "TargetVpnGatewaysClient", "TargetVpnGatewaysScopedList", "TestFailure", + "TestIamPermissionsBackendBucketRequest", + "TestIamPermissionsBackendServiceRequest", "TestIamPermissionsDiskRequest", "TestIamPermissionsExternalVpnGatewayRequest", "TestIamPermissionsFirewallPolicyRequest", @@ -2846,6 +2960,7 @@ "TestIamPermissionsNodeGroupRequest", "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", + "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionDiskRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsReservationRequest", @@ -2857,6 +2972,7 @@ "TestPermissionsRequest", "TestPermissionsResponse", "Uint128", + "UpcomingMaintenance", "UpdateAccessConfigInstanceRequest", "UpdateAutoscalerRequest", "UpdateBackendBucketRequest", @@ -2926,6 +3042,8 @@ "Warning", "Warnings", "WeightedBackendService", + "WithdrawPublicAdvertisedPrefixeRequest", + "WithdrawPublicDelegatedPrefixeRequest", "XpnHostList", "XpnResourceId", "Zone", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json index 0f722bc68444..21fd268a124e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json @@ -142,6 +142,11 @@ "get" ] }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, "Insert": { "methods": [ "insert" @@ -162,6 +167,16 @@ "set_edge_security_policy" ] }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "Update": { "methods": [ "update" @@ -221,6 +236,11 @@ "list" ] }, + "ListUsable": { + "methods": [ + "list_usable" + ] + }, "Patch": { "methods": [ "patch" @@ -241,6 +261,11 @@ "set_security_policy" ] }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "Update": { "methods": [ "update" @@ -1317,6 +1342,11 @@ "set_scheduling" ] }, + "SetSecurityPolicy": { + "methods": [ + "set_security_policy" + ] + }, "SetServiceAccount": { "methods": [ "set_service_account" @@ -1493,6 +1523,11 @@ "get_diagnostics" ] }, + "GetMacsecConfig": { + "methods": [ + "get_macsec_config" + ] + }, "Insert": { "methods": [ "insert" @@ -1683,6 +1718,11 @@ "list" ] }, + "Patch": { + "methods": [ + "patch" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" @@ -2213,6 +2253,11 @@ "rest": { "libraryClient": "PublicAdvertisedPrefixesClient", "rpcs": { + "Announce": { + "methods": [ + "announce" + ] + }, "Delete": { "methods": [ "delete" @@ -2237,6 +2282,11 @@ "methods": [ "patch" ] + }, + "Withdraw": { + "methods": [ + "withdraw" + ] } } } @@ -2252,6 +2302,11 @@ "aggregated_list" ] }, + "Announce": { + "methods": [ + "announce" + ] + }, "Delete": { "methods": [ "delete" @@ -2276,6 +2331,11 @@ "methods": [ "patch" ] + }, + "Withdraw": { + "methods": [ + "withdraw" + ] } } } @@ -2355,6 +2415,11 @@ "list" ] }, + "ListUsable": { + "methods": [ + "list_usable" + ] + }, "Patch": { "methods": [ "patch" @@ -2365,6 +2430,16 @@ "set_iam_policy" ] }, + "SetSecurityPolicy": { + "methods": [ + "set_security_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "Update": { "methods": [ "update" @@ -2775,11 +2850,21 @@ "rest": { "libraryClient": "RegionNetworkEndpointGroupsClient", "rpcs": { + 
"AttachNetworkEndpoints": { + "methods": [ + "attach_network_endpoints" + ] + }, "Delete": { "methods": [ "delete" ] }, + "DetachNetworkEndpoints": { + "methods": [ + "detach_network_endpoints" + ] + }, "Get": { "methods": [ "get" @@ -2794,6 +2879,11 @@ "methods": [ "list" ] + }, + "ListNetworkEndpoints": { + "methods": [ + "list_network_endpoints" + ] } } } @@ -2956,6 +3046,11 @@ "rest": { "libraryClient": "RegionSecurityPoliciesClient", "rpcs": { + "AddRule": { + "methods": [ + "add_rule" + ] + }, "Delete": { "methods": [ "delete" @@ -2966,6 +3061,11 @@ "get" ] }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, "Insert": { "methods": [ "insert" @@ -2980,6 +3080,16 @@ "methods": [ "patch" ] + }, + "PatchRule": { + "methods": [ + "patch_rule" + ] + }, + "RemoveRule": { + "methods": [ + "remove_rule" + ] } } } @@ -3356,6 +3466,11 @@ "get" ] }, + "GetNatIpInfo": { + "methods": [ + "get_nat_ip_info" + ] + }, "GetNatMappingInfo": { "methods": [ "get_nat_mapping_info" @@ -3547,6 +3662,25 @@ } } }, + "SnapshotSettingsService": { + "clients": { + "rest": { + "libraryClient": "SnapshotSettingsServiceClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, "Snapshots": { "clients": { "rest": { @@ -3914,6 +4048,11 @@ "methods": [ "list" ] + }, + "SetSecurityPolicy": { + "methods": [ + "set_security_policy" + ] } } } @@ -3978,6 +4117,11 @@ "methods": [ "set_backup" ] + }, + "SetSecurityPolicy": { + "methods": [ + "set_security_policy" + ] } } } diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py index 35cb3f801497..1150c8c00dff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py @@ -504,7 +504,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -697,7 +697,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -891,7 +891,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1001,7 +1001,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py index ed866de8e111..146234018a79 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py @@ -512,7 +512,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -711,7 +711,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -909,7 +909,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1019,7 +1019,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index c5297370426f..c6ef1f02c005 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -1375,6 +1375,144 @@ def sample_get(): # Done; return the response. return response + def get_iam_policy( + self, + request: Optional[Union[compute.GetIamPolicyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyBackendBucketRequest): + request = compute.GetIamPolicyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("resource", request.resource), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def insert_unary( self, request: Optional[Union[compute.InsertBackendBucketRequest, dict]] = None, @@ -2308,6 +2446,290 @@ def error_code(self): # Done; return the response. return response + def set_iam_policy( + self, + request: Optional[Union[compute.SetIamPolicyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[ + compute.GlobalSetPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, resource, global_set_policy_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyBackendBucketRequest): + request = compute.SetIamPolicyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = ( + global_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("resource", request.resource), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsBackendBucketRequest, dict] + ] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, resource, test_permissions_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsBackendBucketRequest): + request = compute.TestIamPermissionsBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("resource", request.resource), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def update_unary( self, request: Optional[Union[compute.UpdateBackendBucketRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py index 8d006250176a..3d37b86a3bc9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py @@ -147,6 +147,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=None, @@ -167,6 +172,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), self.update: gapic_v1.method.wrap_method( self.update, default_timeout=None, @@ -219,6 +234,15 @@ def get( ]: raise NotImplementedError() + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyBackendBucketRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + @property def insert( self, @@ -255,6 +279,26 @@ def set_edge_security_policy( ]: raise NotImplementedError() + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyBackendBucketRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsBackendBucketRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py index 275234e122ad..82cc6626d5c4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py @@ -95,6 +95,14 @@ def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +135,22 @@ def post_set_edge_security_policy(self, response): logging.log(f"Received response: {response}") return 
response + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -229,6 +253,27 @@ def post_get(self, response: compute.BackendBucket) -> compute.BackendBucket: """ return response + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyBackendBucketRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetIamPolicyBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_insert( self, request: compute.InsertBackendBucketRequest, @@ -319,6 +364,52 @@ def post_set_edge_security_policy( """ return response + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyBackendBucketRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.SetIamPolicyBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsBackendBucketRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.TestIamPermissionsBackendBucketRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_update( self, request: compute.UpdateBackendBucketRequest, @@ -494,7 +585,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
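As an illustrative sketch only, the new IAM hooks can be overridden in the same way as the existing interceptor hooks; the class names below assume the generated ``BackendBucketsRestInterceptor`` and ``BackendBucketsRestTransport`` from this module, and application default credentials are assumed to be available:

                .. code-block:: python

                    from google.cloud import compute_v1
                    from google.cloud.compute_v1.services.backend_buckets.transports.rest import (
                        BackendBucketsRestInterceptor,
                        BackendBucketsRestTransport,
                    )

                    class LoggingBackendBucketsInterceptor(BackendBucketsRestInterceptor):
                        def pre_get_iam_policy(self, request, metadata):
                            # Inspect or modify the request before it is sent.
                            print(f"get_iam_policy request: {request}")
                            return request, metadata

                        def post_get_iam_policy(self, response):
                            # Inspect or modify the response before it reaches user code.
                            print(f"get_iam_policy response: {response}")
                            return response

                    transport = BackendBucketsRestTransport(
                        interceptor=LoggingBackendBucketsInterceptor()
                    )
                    client = compute_v1.BackendBucketsClient(transport=transport)
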
""" @@ -606,7 +697,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -709,7 +800,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -853,6 +944,115 @@ def __call__( resp = self._interceptor.post_get(resp) return resp + class _GetIamPolicy(BackendBucketsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetIamPolicyBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyBackendBucketRequest): + The request object. A request message for + BackendBuckets.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. 
+ **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{resource}/getIamPolicy", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + class _Insert(BackendBucketsRestStub): def __hash__(self): return hash("Insert") @@ -903,7 +1103,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1101,7 +1301,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1211,7 +1411,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1273,6 +1473,221 @@ def __call__( resp = self._interceptor.post_set_edge_security_policy(resp) return resp + class _SetIamPolicy(BackendBucketsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetIamPolicyBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyBackendBucketRequest): + The request object. A request message for + BackendBuckets.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM + documentation `__. 
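As a minimal sketch (the project and backend bucket names are placeholders, and ``roles/compute.viewer`` is only an example role), a typical read-modify-write flow over a bucket's policy might look like:

                .. code-block:: python

                    from google.cloud import compute_v1

                    client = compute_v1.BackendBucketsClient()
                    policy = client.get_iam_policy(
                        project="my-project", resource="my-backend-bucket"
                    )
                    policy.bindings.append(
                        compute_v1.Binding(
                            role="roles/compute.viewer",
                            members=["user:eve@example.com"],
                        )
                    )
                    client.set_iam_policy(
                        project="my-project",
                        resource="my-backend-bucket",
                        global_set_policy_request_resource=compute_v1.GlobalSetPolicyRequest(
                            policy=policy
                        ),
                    )
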
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{resource}/setIamPolicy", + "body": "global_set_policy_request_resource", + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(BackendBucketsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.TestIamPermissionsBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsBackendBucketRequest): + The request object. A request message for + BackendBuckets.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = compute.TestIamPermissionsBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + class _Update(BackendBucketsRestStub): def __hash__(self): return hash("Update") @@ -1323,7 +1738,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1413,6 +1828,14 @@ def get(self) -> Callable[[compute.GetBackendBucketRequest], compute.BackendBuck # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyBackendBucketRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, @@ -1445,6 +1868,25 @@ def set_edge_security_policy( # In C++ this would require a dynamic_cast return self._SetEdgeSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyBackendBucketRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsBackendBucketRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index 2658561a3756..09a04503c7d0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -1718,29 +1718,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). 
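As a minimal sketch (the project and backend service names are placeholders), the bindings on the returned policy can be inspected directly:

            .. code-block:: python

                from google.cloud import compute_v1

                client = compute_v1.BackendServicesClient()
                policy = client.get_iam_policy(
                    project="my-project", resource="my-backend-service"
                )
                for binding in policy.bindings:
                    # Each binding pairs a role with the principals that hold it.
                    print(binding.role, list(binding.members))
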
""" @@ -2154,6 +2136,121 @@ def sample_list(): # Done; return the response. return response + def list_usable( + self, + request: Optional[Union[compute.ListUsableBackendServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsablePager: + r"""Retrieves an aggregated list of all usable backend + services in the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_usable(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListUsableBackendServicesRequest, dict]): + The request object. A request message for + BackendServices.ListUsable. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.backend_services.pagers.ListUsablePager: + Contains a list of usable + BackendService resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListUsableBackendServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListUsableBackendServicesRequest): + request = compute.ListUsableBackendServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsablePager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + def patch_unary( self, request: Optional[Union[compute.PatchBackendServiceRequest, dict]] = None, @@ -2823,29 +2920,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -3200,6 +3279,138 @@ def error_code(self): # Done; return the response. 
return response + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsBackendServiceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, resource, test_permissions_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.TestIamPermissionsBackendServiceRequest): + request = compute.TestIamPermissionsBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("resource", request.resource), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def update_unary( self, request: Optional[Union[compute.UpdateBackendServiceRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/pagers.py index 34d386f3cd6e..2c5af50c56a3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/pagers.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/pagers.py @@ -152,3 +152,65 @@ def __iter__(self) -> Iterator[compute.BackendService]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsablePager: + """A pager for iterating through ``list_usable`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendServiceListUsable` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsable`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendServiceListUsable` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.BackendServiceListUsable], + request: compute.ListUsableBackendServicesRequest, + response: compute.BackendServiceListUsable, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListUsableBackendServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendServiceListUsable): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListUsableBackendServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendServiceListUsable]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendService]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/base.py index 3836c0b08c68..7b605ff84d31 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/base.py @@ -172,6 +172,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_usable: gapic_v1.method.wrap_method( + self.list_usable, + default_timeout=None, + client_info=client_info, + ), self.patch: gapic_v1.method.wrap_method( self.patch, default_timeout=None, @@ -192,6 +197,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), self.update: gapic_v1.method.wrap_method( self.update, default_timeout=None, @@ -295,6 +305,18 @@ def list( ]: raise NotImplementedError() + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableBackendServicesRequest], + Union[ + compute.BackendServiceListUsable, + Awaitable[compute.BackendServiceListUsable], + ], + ]: + raise NotImplementedError() + @property def patch( self, @@ -331,6 +353,17 @@ def set_security_policy( ]: raise NotImplementedError() + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsBackendServiceRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py index 047d831a0f87..3521d69ea732 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py @@ -135,6 +135,14 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_list_usable(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable(self, response): + logging.log(f"Received response: {response}") + return response + def pre_patch(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -167,6 +175,14 @@ def post_set_security_policy(self, response): logging.log(f"Received 
response: {response}") return response + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -380,6 +396,29 @@ def post_list( """ return response + def pre_list_usable( + self, + request: compute.ListUsableBackendServicesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.ListUsableBackendServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_usable + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_list_usable( + self, response: compute.BackendServiceListUsable + ) -> compute.BackendServiceListUsable: + """Post-rpc interceptor for list_usable + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_patch( self, request: compute.PatchBackendServiceRequest, @@ -472,6 +511,31 @@ def post_set_security_policy( """ return response + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsBackendServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.TestIamPermissionsBackendServiceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_update( self, request: compute.UpdateBackendServiceRequest, @@ -647,7 +711,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -847,7 +911,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -950,7 +1014,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1253,29 +1317,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
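The ``pre_list_usable``/``post_list_usable`` and ``pre_test_iam_permissions``/``post_test_iam_permissions`` hooks introduced above follow the existing interceptor pattern. A minimal logging sketch is shown below; the ``BackendServicesRestInterceptor`` class name, its import path, and wiring it in via the transport's ``interceptor`` argument are assumed from the generated naming convention rather than spelled out in this hunk.

.. code-block:: python

    import logging

    from google.cloud.compute_v1.services.backend_services.transports.rest import (
        BackendServicesRestInterceptor,  # assumed name, following the generated REST transports
    )


    class LoggingBackendServicesInterceptor(BackendServicesRestInterceptor):
        """Logs the new ListUsable and TestIamPermissions round trips."""

        def pre_list_usable(self, request, metadata):
            logging.info("ListUsable request for project %s", request.project)
            return request, metadata

        def post_list_usable(self, response):
            logging.info("ListUsable returned %d usable backend service(s)", len(response.items))
            return response

        def pre_test_iam_permissions(self, request, metadata):
            logging.info("TestIamPermissions on resource %s", request.resource)
            return request, metadata

        def post_test_iam_permissions(self, response):
            logging.info("Granted permissions: %s", list(response.permissions))
            return response

Passing an instance of such a subclass to the REST transport is assumed to route every new call through these hooks, as with the other interceptor methods already present on this transport.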
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1376,7 +1422,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1524,6 +1570,94 @@ def __call__( resp = self._interceptor.post_list(resp) return resp + class _ListUsable(BackendServicesRestStub): + def __hash__(self): + return hash("ListUsable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListUsableBackendServicesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.BackendServiceListUsable: + r"""Call the list usable method over HTTP. + + Args: + request (~.compute.ListUsableBackendServicesRequest): + The request object. A request message for + BackendServices.ListUsable. See the + method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendServiceListUsable: + Contains a list of usable + BackendService resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendServices/listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable(request, metadata) + pb_request = compute.ListUsableBackendServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceListUsable() + pb_resp = compute.BackendServiceListUsable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable(resp) + return resp + class _Patch(BackendServicesRestStub): def __hash__(self): return hash("Patch") @@ -1574,7 +1708,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1684,7 +1818,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1800,29 +1934,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
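With the ``list_usable`` client method, the ``ListUsablePager``, and the ``_ListUsable`` REST stub above in place, the new surface can be exercised end to end. A usage sketch, assuming application-default credentials; the project ID is a placeholder.

.. code-block:: python

    from google.cloud import compute_v1


    def print_usable_backend_services(project: str) -> None:
        # Client and request types added in this change.
        client = compute_v1.BackendServicesClient()
        request = compute_v1.ListUsableBackendServicesRequest(project=project)

        # Iterating the pager yields BackendService items and fetches
        # additional pages automatically as needed.
        for backend_service in client.list_usable(request=request):
            print(backend_service.name)

        # Pages can also be walked explicitly via the pager's `pages` property.
        for page in client.list_usable(request=request).pages:
            print(f"page with {len(page.items)} usable backend service(s)")


    print_usable_backend_services("my-project")  # placeholder project ID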
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1932,7 +2048,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1994,6 +2110,103 @@ def __call__( resp = self._interceptor.post_set_security_policy(resp) return resp + class _TestIamPermissions(BackendServicesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.TestIamPermissionsBackendServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsBackendServiceRequest): + The request object. A request message for + BackendServices.TestIamPermissions. 
See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = compute.TestIamPermissionsBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + class _Update(BackendServicesRestStub): def __hash__(self): return hash("Update") @@ -2044,7 +2257,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2181,6 +2394,16 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableBackendServicesRequest], compute.BackendServiceListUsable + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsable(self._session, self._host, self._interceptor) # type: ignore + @property def patch( self, @@ -2215,6 +2438,17 @@ def set_security_policy( # In C++ this would require a dynamic_cast return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsBackendServiceRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index 5c4e52803791..ce803ce5b37b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -1951,29 +1951,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -3146,29 +3128,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
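The ``TestIamPermissions`` method added to ``BackendServicesClient`` above also accepts flattened arguments. A brief sketch follows; the resource name and permission string are illustrative only.

.. code-block:: python

    from google.cloud import compute_v1


    def granted_backend_service_permissions(project: str, backend_service: str) -> list:
        # Flattened arguments map onto TestIamPermissionsBackendServiceRequest.
        client = compute_v1.BackendServicesClient()
        response = client.test_iam_permissions(
            project=project,
            resource=backend_service,
            test_permissions_request_resource=compute_v1.TestPermissionsRequest(
                permissions=["compute.backendServices.get"],  # illustrative permission
            ),
        )
        # TestPermissionsResponse.permissions contains the subset actually granted.
        return list(response.permissions)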
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py index 7c31f2bbf080..b029fe89c58a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py @@ -745,7 +745,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -943,7 +943,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1053,7 +1053,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1162,7 +1162,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1364,29 +1364,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1486,7 +1468,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1681,7 +1663,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1792,7 +1774,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1906,29 +1888,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -2038,7 +2002,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2148,7 +2112,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -2260,7 +2224,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2364,7 +2328,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2572,7 +2536,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py index 78c72efc786a..2c3fd236023c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py @@ -405,7 +405,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -606,7 +606,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -804,7 +804,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index 6cbc55bf4eba..16a048ed9b54 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -1689,29 +1689,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -3583,29 +3565,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py index bb5312f7107b..b7c5063c9d22 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py @@ -761,7 +761,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -871,7 +871,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -981,7 +981,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1082,7 +1082,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1361,29 +1361,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1576,7 +1558,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1840,7 +1822,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1941,7 +1923,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2051,7 +2033,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2161,7 +2143,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2264,7 +2246,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2369,29 +2351,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - 
domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py index 9e4b5637b9b5..7864049f4468 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py @@ -391,7 +391,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -582,7 +582,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -777,7 +777,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -887,7 +887,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py index 91f658b8dc1a..9ed745db98e3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py @@ -545,7 +545,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -744,7 +744,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -942,7 +942,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
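The Operation docstrings in these hunks point callers at the ``zoneOperations`` resource for polling zonal operations. For illustration only (the project, zone, and operation name below are placeholders, not values taken from this patch), waiting on a zonal operation with the generated client might look like:

.. code-block:: python

    from google.cloud import compute_v1

    # Placeholder identifiers; substitute real values.
    project = "my-project"
    zone = "us-central1-a"
    operation_name = "operation-123"  # e.g. Operation.name returned by a *_unary call

    zone_ops = compute_v1.ZoneOperationsClient()
    # wait() blocks until the operation reaches DONE or the default deadline expires.
    op = zone_ops.wait(project=project, zone=zone, operation=operation_name)

    if op.error.errors:
        raise RuntimeError(f"Operation failed: {op.error}")
    print(op.status)  # Operation.Status.DONE on success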
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1052,7 +1052,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1162,7 +1162,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py index 01e15f6fdde0..746f5a5ec9a1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py @@ -397,7 +397,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -591,7 +591,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -787,7 +787,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -897,7 +897,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py index 443e61ec4379..fba5ff755031 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py @@ -428,7 +428,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -627,7 +627,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -825,7 +825,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -935,7 +935,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1045,7 +1045,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 9187bc42cc9e..616719e71759 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -1400,12 +1400,9 @@ def sample_get(): (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py index e9ec8865fe73..2405a5ffb77c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py @@ -451,7 +451,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -567,7 +567,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -668,7 +668,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -776,12 +776,9 @@ def __call__( (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ @@ -881,7 +878,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index 7d076aee9ec4..50e8fc366167 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -729,7 +729,7 @@ def sample_get(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -986,7 +986,7 @@ def sample_wait(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py index e40583b479ba..6dbe8904affb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py @@ -549,7 +549,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -738,7 +738,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index cb44d94f1ed6..f3c38220aecd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -603,7 +603,7 @@ def sample_get(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py index 89bad517ad3f..87ad13c7610a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py @@ -407,7 +407,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py index 944592a54a4c..3e033802efe7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py @@ -386,7 +386,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -582,7 +582,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -778,7 +778,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
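The ``GlobalOperations`` ``wait`` docstrings above describe the same polling pattern at global scope, with regional and zonal operations handled by their own clients. A minimal sketch, assuming placeholder project and operation names:

.. code-block:: python

    from google.cloud import compute_v1

    project = "my-project"  # placeholder

    global_ops = compute_v1.GlobalOperationsClient()
    op = global_ops.wait(project=project, operation="operation-abc")

    # Regional and zonal operations use their own clients:
    #   compute_v1.RegionOperationsClient().wait(project=..., region=..., operation=...)
    #   compute_v1.ZoneOperationsClient().wait(project=..., zone=..., operation=...)
    print(op.status)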
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 2854683f62f2..cbcb554732a7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -860,25 +860,30 @@ def sample_get(): Returns: google.cloud.compute_v1.types.HealthCheck: - Represents a Health Check resource. Google Compute - Engine has two Health Check resources: \* - [Global](/compute/docs/reference/rest/v1/healthChecks) - \* + Represents a health check resource. Google Compute + Engine has two health check resources: \* [Regional](/compute/docs/reference/rest/v1/regionHealthChecks) - Internal HTTP(S) load balancers must use regional health - checks (compute.v1.regionHealthChecks). Traffic Director - must use global health checks (compute.v1.healthChecks). - Internal TCP/UDP load balancers can use either regional - or global health checks (compute.v1.regionHealthChecks - or compute.v1.healthChecks). External HTTP(S), TCP - proxy, and SSL proxy load balancers as well as managed - instance group auto-healing must use global health - checks (compute.v1.healthChecks). Backend service-based - network load balancers must use regional health checks - (compute.v1.regionHealthChecks). Target pool-based - network load balancers must use legacy HTTP health - checks (compute.v1.httpHealthChecks). For more - information, see Health checks overview. + \* + [Global](/compute/docs/reference/rest/v1/healthChecks) + These health check resources can be used for load + balancing and for autohealing VMs in a managed instance + group (MIG). **Load balancing** The following load + balancer can use either regional or global health check: + \* Internal TCP/UDP load balancer The following load + balancers require regional health check: \* Internal + HTTP(S) load balancer \* Backend service-based network + load balancer Traffic Director and the following load + balancers require global health check: \* External + HTTP(S) load balancer \* TCP proxy load balancer \* SSL + proxy load balancer The following load balancer require + [legacy HTTP health + checks](/compute/docs/reference/rest/v1/httpHealthChecks): + \* Target pool-based network load balancer **Autohealing + in MIGs** The health checks that you use for autohealing + VMs in a MIG can be either regional or global. For more + information, see Set up an application health check and + autohealing. For more information, see Health checks + overview. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py index 42e8230a45be..0c3ed7852768 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py @@ -514,7 +514,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -602,26 +602,30 @@ def __call__( Returns: ~.compute.HealthCheck: - Represents a Health Check resource. Google Compute - Engine has two Health Check resources: \* - `Global `__ - \* + Represents a health check resource. Google Compute + Engine has two health check resources: \* `Regional `__ - Internal HTTP(S) load balancers must use regional health - checks (``compute.v1.regionHealthChecks``). Traffic - Director must use global health checks - (``compute.v1.healthChecks``). Internal TCP/UDP load - balancers can use either regional or global health - checks (``compute.v1.regionHealthChecks`` or - ``compute.v1.healthChecks``). External HTTP(S), TCP - proxy, and SSL proxy load balancers as well as managed - instance group auto-healing must use global health - checks (``compute.v1.healthChecks``). Backend - service-based network load balancers must use regional - health checks (``compute.v1.regionHealthChecks``). - Target pool-based network load balancers must use legacy - HTTP health checks (``compute.v1.httpHealthChecks``). - For more information, see Health checks overview. + \* + `Global `__ + These health check resources can be used for load + balancing and for autohealing VMs in a managed instance + group (MIG). **Load balancing** The following load + balancer can use either regional or global health check: + \* Internal TCP/UDP load balancer The following load + balancers require regional health check: \* Internal + HTTP(S) load balancer \* Backend service-based network + load balancer Traffic Director and the following load + balancers require global health check: \* External + HTTP(S) load balancer \* TCP proxy load balancer \* SSL + proxy load balancer The following load balancer require + `legacy HTTP health + checks `__: + \* Target pool-based network load balancer **Autohealing + in MIGs** The health checks that you use for autohealing + VMs in a MIG can be either regional or global. For more + information, see Set up an application health check and + autohealing. For more information, see Health checks + overview. """ @@ -721,7 +725,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -919,7 +923,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1029,7 +1033,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index 748518d7fb20..555ad0967766 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -1269,29 +1269,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -2067,29 +2049,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py index 9f28028884c6..7b81f8ecb0a5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py @@ -533,7 +533,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -634,7 +634,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -927,29 +927,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1049,7 +1031,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1243,7 +1225,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1357,29 +1339,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1489,7 +1453,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index c4550dd8214a..984bbeb87783 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -486,9 +486,7 @@ def sample_abandon_instances(): Args: request (Union[google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest, dict]): - The request object. Messages - - A request message for + The request object. A request message for InstanceGroupManagers.AbandonInstances. See the method description for details. 
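The ``AbandonInstances`` request described above is exposed as a flattened method on ``InstanceGroupManagersClient``. A hedged sketch, assuming placeholder names and the library's usual ``*_request_resource`` parameter naming; instances are referenced by full or partial URL:

.. code-block:: python

    from google.cloud import compute_v1

    igm_client = compute_v1.InstanceGroupManagersClient()
    request_body = compute_v1.InstanceGroupManagersAbandonInstancesRequest(
        instances=["zones/us-central1-a/instances/my-vm"]  # placeholder partial URL
    )
    operation = igm_client.abandon_instances(
        project="my-project",
        zone="us-central1-a",
        instance_group_manager="my-mig",
        instance_group_managers_abandon_instances_request_resource=request_body,
    )
    operation.result()  # blocks until the zonal operation completes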
project (str): @@ -653,9 +651,7 @@ def sample_abandon_instances(): Args: request (Union[google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest, dict]): - The request object. Messages - - A request message for + The request object. A request message for InstanceGroupManagers.AbandonInstances. See the method description for details. project (str): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py index 9b85dc10eb54..c13603e007b9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py @@ -836,9 +836,7 @@ def __call__( Args: request (~.compute.AbandonInstancesInstanceGroupManagerRequest): - The request object. Messages - - A request message for + The request object. A request message for InstanceGroupManagers.AbandonInstances. See the method description for details. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -862,7 +860,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1061,7 +1059,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1175,7 +1173,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1287,7 +1285,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1388,7 +1386,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1501,7 +1499,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1710,7 +1708,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2172,7 +2170,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2283,7 +2281,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2397,7 +2395,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2513,7 +2511,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2614,7 +2612,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2728,7 +2726,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2841,7 +2839,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py index 887f2b9adf4a..35ce3e13f077 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py @@ -490,7 +490,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -686,7 +686,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -887,7 +887,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1178,7 +1178,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1290,7 +1290,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index 8fa03a0bc701..203f98d0a46c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -869,11 +869,19 @@ def sample_get(): Returns: google.cloud.compute_v1.types.InstanceTemplate: - Represents an Instance Template - resource. You can use instance templates - to create VM instances and managed - instance groups. For more information, - read Instance Templates. + Represents an Instance Template resource. Google Compute + Engine has two Instance Template resources: \* + [Global](/compute/docs/reference/rest/v1/instanceTemplates) + \* + [Regional](/compute/docs/reference/rest/v1/regionInstanceTemplates) + You can reuse a global instance template in different + regions whereas you can use a regional instance template + in a specified region only. If you want to reduce + cross-region dependency or achieve data residency, use a + regional instance template. To create VMs, managed + instance groups, and reservations, you can use either + global or regional instance templates. For more + information, read Instance Templates. """ # Create or coerce a protobuf request object. @@ -1008,29 +1016,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1541,29 +1531,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py index f1c46473e4ec..fc465858a64c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py @@ -553,7 +553,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -641,11 +641,19 @@ def __call__( Returns: ~.compute.InstanceTemplate: - Represents an Instance Template - resource. You can use instance templates - to create VM instances and managed - instance groups. For more information, - read Instance Templates. + Represents an Instance Template resource. 
Google Compute + Engine has two Instance Template resources: \* + `Global `__ + \* + `Regional `__ + You can reuse a global instance template in different + regions whereas you can use a regional instance template + in a specified region only. If you want to reduce + cross-region dependency or achieve data residency, use a + regional instance template. To create VMs, managed + instance groups, and reservations, you can use either + global or regional instance templates. For more + information, read Instance Templates. """ @@ -749,29 +757,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -872,7 +862,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1072,29 +1062,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
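The updated ``InstanceTemplate`` description distinguishes global templates, which can be reused across regions, from regional templates, which are usable only in their own region. A sketch of fetching each kind (all identifiers are placeholders):

.. code-block:: python

    from google.cloud import compute_v1

    project = "my-project"  # placeholder

    # Global instance template: reusable in any region.
    tmpl = compute_v1.InstanceTemplatesClient().get(
        project=project, instance_template="my-template"
    )
    # Regional instance template: scoped to a single region.
    regional_tmpl = compute_v1.RegionInstanceTemplatesClient().get(
        project=project, region="us-central1", instance_template="my-regional-template"
    )
    print(tmpl.self_link, regional_tmpl.self_link)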
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 88ead93b8467..d3ebdbd7fb77 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -3179,29 +3179,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
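Instances are zonal resources, so ``get_iam_policy`` takes a zone in addition to the resource name. A short sketch with placeholder identifiers:

.. code-block:: python

    from google.cloud import compute_v1

    instances = compute_v1.InstancesClient()
    policy = instances.get_iam_policy(
        project="my-project", zone="us-central1-a", resource="my-vm"
    )
    for binding in policy.bindings:
        print(binding.role, list(binding.members))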
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -5940,29 +5922,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -8197,6 +8161,323 @@ def error_code(self): # Done; return the response. return response + def set_security_policy_unary( + self, + request: Optional[Union[compute.SetSecurityPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_security_policy_request_resource: Optional[ + compute.InstancesSetSecurityPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Google Cloud Armor security policy for the + specified instance. For more information, see Google + Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.SetSecurityPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the Instance resource to + which the security policy should be set. + The name should conform to RFC1035. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_security_policy_request_resource (google.cloud.compute_v1.types.InstancesSetSecurityPolicyRequest): + The body resource for this request + This corresponds to the ``instances_set_security_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, instance, instances_set_security_policy_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyInstanceRequest): + request = compute.SetSecurityPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_security_policy_request_resource is not None: + request.instances_set_security_policy_request_resource = ( + instances_set_security_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_security_policy( + self, + request: Optional[Union[compute.SetSecurityPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_security_policy_request_resource: Optional[ + compute.InstancesSetSecurityPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Google Cloud Armor security policy for the + specified instance. For more information, see Google + Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.SetSecurityPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the Instance resource to + which the security policy should be set. + The name should conform to RFC1035. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_security_policy_request_resource (google.cloud.compute_v1.types.InstancesSetSecurityPolicyRequest): + The body resource for this request + This corresponds to the ``instances_set_security_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, instance, instances_set_security_policy_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyInstanceRequest): + request = compute.SetSecurityPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_security_policy_request_resource is not None: + request.instances_set_security_policy_request_resource = ( + instances_set_security_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def set_service_account_unary( self, request: Optional[Union[compute.SetServiceAccountInstanceRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/base.py index 71e05cf63dd4..2aa74ac0602b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/base.py @@ -287,6 +287,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_security_policy: gapic_v1.method.wrap_method( + self.set_security_policy, + default_timeout=None, + client_info=client_info, + ), self.set_service_account: gapic_v1.method.wrap_method( self.set_service_account, default_timeout=None, @@ -667,6 +672,15 @@ def set_scheduling( ]: raise NotImplementedError() + @property + def set_security_policy( + self, + ) -> Callable[ + [compute.SetSecurityPolicyInstanceRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def set_service_account( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py index 3bfd316ac7f2..f2e49afdd4f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py @@ -319,6 +319,14 @@ def post_set_scheduling(self, response): logging.log(f"Received response: {response}") return response + def pre_set_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_service_account(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -1137,6 +1145,29 @@ def post_set_scheduling(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_set_security_policy( + self, + request: compute.SetSecurityPolicyInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.SetSecurityPolicyInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_security_policy( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_service_account( self, request: compute.SetServiceAccountInstanceRequest, @@ -1612,7 +1643,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
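The new ``InstancesClient.set_security_policy`` added above returns a ``google.api_core.extended_operation.ExtendedOperation`` that polls the zone operation on the caller's behalf. A minimal usage sketch, assuming the ``security_policy`` and ``network_interfaces`` fields on ``InstancesSetSecurityPolicyRequest`` keep their REST API names (all resource names below are placeholders):

.. code-block:: python

    # Sketch only: attach a Cloud Armor policy to a VM's NIC and block until
    # the underlying zone operation completes.
    from google.cloud import compute_v1

    client = compute_v1.InstancesClient()

    body = compute_v1.InstancesSetSecurityPolicyRequest(
        # Field names assumed from the REST surface (securityPolicy, networkInterfaces).
        security_policy=(
            "https://www.googleapis.com/compute/v1/projects/my-project"
            "/global/securityPolicies/my-edge-policy"
        ),
        network_interfaces=["nic0"],
    )

    operation = client.set_security_policy(
        project="my-project",
        zone="us-central1-a",
        instance="my-instance",
        instances_set_security_policy_request_resource=body,
    )

    # ExtendedOperation is a polling future; result() re-raises any HTTP error
    # surfaced through the error_code/error_message shims defined above.
    operation.result(timeout=300)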
""" @@ -1724,7 +1755,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1922,7 +1953,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2032,7 +2063,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2142,7 +2173,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2246,7 +2277,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2351,7 +2382,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2724,29 +2755,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -3110,7 +3123,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -3393,7 +3406,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -3505,7 +3518,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -3606,7 +3619,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -3798,7 +3811,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -3904,7 +3917,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4011,29 +4024,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -4143,7 +4138,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
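The docstring fixes above and below replace the non-existent ``zonalOperations`` resource with ``zoneOperations``, which is the resource callers of the ``*_unary`` variants are left to poll themselves. A sketch of that manual polling, assuming ``ZoneOperationsClient.wait`` and the ``reset_unary`` flattened signature behave as in the current generated surface (names are placeholders):

.. code-block:: python

    # Sketch only: poll the zoneOperations resource for an Operation returned
    # by a *_unary call, mirroring what the non-unary wrappers do internally.
    from google.cloud import compute_v1

    instances = compute_v1.InstancesClient()
    zone_ops = compute_v1.ZoneOperationsClient()

    operation = instances.reset_unary(
        project="my-project", zone="us-central1-a", instance="my-instance"
    )

    # zoneOperations.wait blocks for up to ~2 minutes per call, so loop until
    # the operation reports DONE.
    while operation.status != compute_v1.Operation.Status.DONE:
        operation = zone_ops.wait(
            project="my-project",
            zone="us-central1-a",
            operation=operation.name,
        )

    if operation.error.errors:
        raise RuntimeError(operation.error.errors[0].message)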
""" @@ -4253,7 +4248,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4365,7 +4360,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4477,7 +4472,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4587,7 +4582,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4699,7 +4694,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4809,7 +4804,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -4869,6 +4864,118 @@ def __call__( resp = self._interceptor.post_set_scheduling(resp) return resp + class _SetSecurityPolicy(InstancesRestStub): + def __hash__(self): + return hash("SetSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetSecurityPolicyInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the set security policy method over HTTP. + + Args: + request (~.compute.SetSecurityPolicyInstanceRequest): + The request object. A request message for + Instances.SetSecurityPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. 
For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setSecurityPolicy", + "body": "instances_set_security_policy_request_resource", + }, + ] + request, metadata = self._interceptor.pre_set_security_policy( + request, metadata + ) + pb_request = compute.SetSecurityPolicyInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_security_policy(resp) + return resp + class _SetServiceAccount(InstancesRestStub): def __hash__(self): return hash("SetServiceAccount") @@ -4919,7 +5026,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5032,7 +5139,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5149,7 +5256,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5260,7 +5367,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. 
For more information, read Global, Regional, and Zonal Resources. """ @@ -5363,7 +5470,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5464,7 +5571,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5575,7 +5682,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5676,7 +5783,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5874,7 +5981,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -5986,7 +6093,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -6098,7 +6205,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -6212,7 +6319,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -6325,7 +6432,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -6644,6 +6751,14 @@ def set_scheduling( # In C++ this would require a dynamic_cast return self._SetScheduling(self._session, self._host, self._interceptor) # type: ignore + @property + def set_security_policy( + self, + ) -> Callable[[compute.SetSecurityPolicyInstanceRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def set_service_account( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py index a811fb03b84a..ed66d890146e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py @@ -524,7 +524,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -719,7 +719,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -918,7 +918,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1028,7 +1028,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index b02ffeda2bbf..99844d460be0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -808,7 +808,12 @@ def get_diagnostics( metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectsGetDiagnosticsResponse: r"""Returns the interconnectDiagnostics for the specified - Interconnect. + Interconnect. In the event of a global outage, do not + use this API to make decisions about where to redirect + your network traffic. Unlike a VLAN attachment, which is + regional, a Cloud Interconnect connection is a global + resource. A global outage can prevent this API from + functioning properly. .. code-block:: python @@ -915,6 +920,126 @@ def sample_get_diagnostics(): # Done; return the response. 
return response + def get_macsec_config( + self, + request: Optional[ + Union[compute.GetMacsecConfigInterconnectRequest, dict] + ] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InterconnectsGetMacsecConfigResponse: + r"""Returns the interconnectMacsecConfig for the + specified Interconnect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_macsec_config(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetMacsecConfigInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get_macsec_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetMacsecConfigInterconnectRequest, dict]): + The request object. A request message for + Interconnects.GetMacsecConfig. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect resource to + query. + + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InterconnectsGetMacsecConfigResponse: + Response for the + InterconnectsGetMacsecConfigRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetMacsecConfigInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetMacsecConfigInterconnectRequest): + request = compute.GetMacsecConfigInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_macsec_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("interconnect", request.interconnect), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def insert_unary( self, request: Optional[Union[compute.InsertInterconnectRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/base.py index e97e30e3b8ee..4541d5ad7ea6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/base.py @@ -142,6 +142,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_macsec_config: gapic_v1.method.wrap_method( + self.get_macsec_config, + default_timeout=None, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=None, @@ -203,6 +208,18 @@ def get_diagnostics( ]: raise NotImplementedError() + @property + def get_macsec_config( + self, + ) -> Callable[ + [compute.GetMacsecConfigInterconnectRequest], + Union[ + compute.InterconnectsGetMacsecConfigResponse, + Awaitable[compute.InterconnectsGetMacsecConfigResponse], + ], + ]: + raise NotImplementedError() + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py index 1edbc2b6c908..c53df4a1aa98 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py @@ -87,6 +87,14 @@ def post_get_diagnostics(self, response): logging.log(f"Received response: {response}") return response + def pre_get_macsec_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_macsec_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -190,6 +198,29 @@ def post_get_diagnostics( """ return response + def pre_get_macsec_config( + self, + request: compute.GetMacsecConfigInterconnectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetMacsecConfigInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_macsec_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_get_macsec_config( + self, response: compute.InterconnectsGetMacsecConfigResponse + ) -> compute.InterconnectsGetMacsecConfigResponse: + """Post-rpc interceptor for get_macsec_config + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. 
+ """ + return response + def pre_insert( self, request: compute.InsertInterconnectRequest, @@ -428,7 +459,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -659,6 +690,96 @@ def __call__( resp = self._interceptor.post_get_diagnostics(resp) return resp + class _GetMacsecConfig(InterconnectsRestStub): + def __hash__(self): + return hash("GetMacsecConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetMacsecConfigInterconnectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InterconnectsGetMacsecConfigResponse: + r"""Call the get macsec config method over HTTP. + + Args: + request (~.compute.GetMacsecConfigInterconnectRequest): + The request object. A request message for + Interconnects.GetMacsecConfig. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectsGetMacsecConfigResponse: + Response for the + InterconnectsGetMacsecConfigRequest. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnects/{interconnect}/getMacsecConfig", + }, + ] + request, metadata = self._interceptor.pre_get_macsec_config( + request, metadata + ) + pb_request = compute.GetMacsecConfigInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectsGetMacsecConfigResponse() + pb_resp = compute.InterconnectsGetMacsecConfigResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_macsec_config(resp) + return resp + class _Insert(InterconnectsRestStub): def __hash__(self): return hash("Insert") @@ -709,7 +830,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -907,7 +1028,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1017,7 +1138,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1102,6 +1223,17 @@ def get_diagnostics( # In C++ this would require a dynamic_cast return self._GetDiagnostics(self._session, self._host, self._interceptor) # type: ignore + @property + def get_macsec_config( + self, + ) -> Callable[ + [compute.GetMacsecConfigInterconnectRequest], + compute.InterconnectsGetMacsecConfigResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMacsecConfig(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index e147cc1924e9..ad1295bdf966 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -887,29 +887,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
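The new ``InterconnectsClient.get_macsec_config`` added above issues a plain ``GET`` against ``.../global/interconnects/{interconnect}/getMacsecConfig`` and returns an ``InterconnectsGetMacsecConfigResponse``. A minimal sketch using the flattened arguments shown in the diff (project and interconnect names are placeholders):

.. code-block:: python

    # Sketch only: fetch the MACsec configuration for an existing Interconnect.
    from google.cloud import compute_v1

    client = compute_v1.InterconnectsClient()

    response = client.get_macsec_config(
        project="my-project",
        interconnect="my-interconnect",
    )

    # The response wraps the MACsec configuration for the Interconnect.
    print(response)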
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1418,29 +1400,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py index c803096313cc..cdb5f1fd309e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py @@ -424,7 +424,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -621,29 +621,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -744,7 +726,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -943,29 +925,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index 927a2b2c44ee..a86985f2ae56 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -883,29 +883,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1414,29 +1396,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py index 6ff363938cc9..8be9aef8753c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py @@ -430,7 +430,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -629,29 +629,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -752,7 +734,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -952,29 +934,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index 0118f62db82b..e411a89175fa 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -1042,29 +1042,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1522,6 +1504,311 @@ def sample_list(): # Done; return the response. return response + def patch_unary( + self, + request: Optional[Union[compute.PatchNetworkAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_attachment: Optional[str] = None, + network_attachment_resource: Optional[compute.NetworkAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified NetworkAttachment resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkAttachmentRequest( + network_attachment="network_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_attachment (str): + Name of the NetworkAttachment + resource to patch. + + This corresponds to the ``network_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + This corresponds to the ``network_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, network_attachment, network_attachment_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkAttachmentRequest): + request = compute.PatchNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_attachment is not None: + request.network_attachment = network_attachment + if network_attachment_resource is not None: + request.network_attachment_resource = network_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_attachment", request.network_attachment), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch( + self, + request: Optional[Union[compute.PatchNetworkAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_attachment: Optional[str] = None, + network_attachment_resource: Optional[compute.NetworkAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified NetworkAttachment resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkAttachmentRequest( + network_attachment="network_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_attachment (str): + Name of the NetworkAttachment + resource to patch. + + This corresponds to the ``network_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + This corresponds to the ``network_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [project, region, network_attachment, network_attachment_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkAttachmentRequest): + request = compute.PatchNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_attachment is not None: + request.network_attachment = network_attachment + if network_attachment_resource is not None: + request.network_attachment_resource = network_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_attachment", request.network_attachment), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def set_iam_policy( self, request: Optional[ @@ -1622,29 +1909,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
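Besides the request-object form in the generated sample above, the new ``patch`` method also accepts the flattened arguments declared in its signature. A minimal sketch under that assumption; the resource name and field values are placeholders.

.. code-block:: python

    from google.cloud import compute_v1

    def sample_patch_flattened():
        client = compute_v1.NetworkAttachmentsClient()

        # Only the fields set here are sent, following the JSON merge patch
        # semantics described in the method docstring.
        attachment = compute_v1.NetworkAttachment(
            description="updated by patch",
        )

        operation = client.patch(
            project="project_value",
            region="region_value",
            network_attachment="network_attachment_value",
            network_attachment_resource=attachment,
        )

        # patch() returns an ExtendedOperation; result() blocks until the
        # underlying regional operation completes.
        operation.result()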
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). 
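The hunk above also adds ``patch_unary``, which returns the raw ``compute.Operation`` instead of wrapping it in an ``ExtendedOperation``. A sketch of waiting on such an operation by hand, assuming the standard ``RegionOperationsClient`` flattened ``wait`` arguments; all values are placeholders.

.. code-block:: python

    from google.cloud import compute_v1

    def sample_patch_unary_and_wait():
        client = compute_v1.NetworkAttachmentsClient()
        operations = compute_v1.RegionOperationsClient()

        request = compute_v1.PatchNetworkAttachmentRequest(
            network_attachment="network_attachment_value",
            project="project_value",
            region="region_value",
        )
        op = client.patch_unary(request=request)

        # Wait server-side for the operation to finish (or for the wait
        # deadline to expire); an alternative to the GetRegionOperationRequest
        # polling that the ExtendedOperation wrapper in patch() performs.
        operations.wait(
            project="project_value",
            region="region_value",
            operation=op.name,
        )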
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/base.py index 2bb12a91fdc2..aa20640733b7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/base.py @@ -157,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), self.set_iam_policy: gapic_v1.method.wrap_method( self.set_iam_policy, default_timeout=None, @@ -235,6 +240,15 @@ def list( ]: raise NotImplementedError() + @property + def patch( + self, + ) -> Callable[ + [compute.PatchNetworkAttachmentRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def set_iam_policy( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py index 3441c8265c91..308dceb17f55 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py @@ -111,6 +111,14 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_iam_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -267,6 +275,27 @@ def post_list( """ return response + def pre_patch( + self, + request: compute.PatchNetworkAttachmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.PatchNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy( self, request: compute.SetIamPolicyNetworkAttachmentRequest, @@ -555,7 +584,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -748,29 +777,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -871,7 +882,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1017,6 +1028,116 @@ def __call__( resp = self._interceptor.post_list(resp) return resp + class _Patch(NetworkAttachmentsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.PatchNetworkAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchNetworkAttachmentRequest): + The request object. A request message for + NetworkAttachments.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}", + "body": "network_attachment_resource", + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchNetworkAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + class _SetIamPolicy(NetworkAttachmentsRestStub): def __hash__(self): return hash("SetIamPolicy") @@ -1071,29 +1192,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
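The ``_Patch`` stub above maps the RPC to ``PATCH /compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}`` with ``network_attachment_resource`` as the JSON body. For reference, a hedged sketch of the same call made directly over HTTP with google-auth; the endpoint, region, attachment name, and body field are assumptions rather than values taken from this patch.

.. code-block:: python

    import google.auth
    from google.auth.transport.requests import AuthorizedSession

    credentials, project = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    session = AuthorizedSession(credentials)

    url = (
        "https://compute.googleapis.com/compute/v1/projects/{project}"
        "/regions/{region}/networkAttachments/{attachment}"
    ).format(project=project, region="us-central1", attachment="my-attachment")

    # JSON merge patch body: include only the fields being changed.
    response = session.patch(url, json={"description": "updated description"})
    response.raise_for_status()

    # The API returns an Operation resource; its name can be polled via the
    # regionOperations resource.
    print(response.json().get("name"))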
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1303,6 +1406,14 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def patch( + self, + ) -> Callable[[compute.PatchNetworkAttachmentRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + @property def set_iam_policy( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py index 5286e81c3c49..fd242b75ae07 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py @@ -469,7 +469,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -658,7 +658,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -768,7 +768,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index 79a1b5b329e1..b518e6899a63 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -1590,12 +1590,9 @@ def sample_get(): (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py index 84bbdc63d342..19693c781c21 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py @@ -597,7 +597,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -711,7 +711,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -812,7 +812,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -918,12 +918,9 @@ def __call__( (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. 
- For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ @@ -1023,7 +1020,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index 9cb799f1393c..5314f3cab587 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -1836,29 +1836,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a 
description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -3578,29 +3560,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py index 5ad180fba393..c8f796721c18 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py @@ -714,7 +714,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -824,7 +824,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -934,7 +934,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1035,7 +1035,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1314,29 +1314,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1527,7 +1509,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1723,7 +1705,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1833,7 +1815,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1943,7 +1925,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2048,7 +2030,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2153,29 +2135,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - 
domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py index 4cfcd44ac057..154f6c6c7019 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py @@ -538,7 +538,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -648,7 +648,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -927,7 +927,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1209,7 +1209,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1319,7 +1319,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1429,7 +1429,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1532,7 +1532,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index 13eb85f45bf2..baa539afc9cc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -1664,29 +1664,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -2712,29 +2694,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py index c54e21647f8b..a71d1bff375a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py @@ -637,7 +637,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -833,7 +833,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -934,7 +934,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1144,29 +1144,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1269,7 +1251,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1551,7 +1533,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1665,29 +1647,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1797,7 +1761,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1910,7 +1874,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
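The recurring ``zonalOperations`` to ``zoneOperations`` corrections above all point at the same polling surface. A short hedged sketch of waiting on a zonal operation, assuming the ``ZoneOperationsClient`` and its flattened ``wait`` arguments from the compute_v1 surface (neither appears in this diff):

.. code-block:: python

    from google.cloud import compute_v1

    def wait_for_zonal_operation(project: str, zone: str, operation: str):
        # Poll the operation through the zoneOperations resource, as the
        # corrected docstrings describe for zonal operations.
        client = compute_v1.ZoneOperationsClient()
        return client.wait(project=project, zone=zone, operation=operation)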
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index 8af299fcda71..ccb7b49e492c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -1038,29 +1038,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1622,29 +1604,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py index 320623ebda93..69560c4a92cc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py @@ -547,7 +547,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -743,29 +743,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -866,7 +848,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1066,29 +1048,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py index 3be81774f4fa..4884f8e68303 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py @@ -522,7 +522,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -719,7 +719,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -917,7 +917,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py index 5daf438fe430..303d79fc411c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py @@ -612,7 +612,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -715,7 +715,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -827,7 +827,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -928,7 +928,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1404,7 +1404,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1514,7 +1514,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1625,7 +1625,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1737,7 +1737,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1849,7 +1849,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index 0e657c38b315..1725dfea42bc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -423,6 +423,268 @@ def __init__( api_audience=client_options.api_audience, ) + def announce_unary( + self, + request: Optional[ + Union[compute.AnnouncePublicAdvertisedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Announces the specified PublicAdvertisedPrefix + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_announce(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AnnouncePublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Announce. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + The name of the public advertised + prefix. It should comply with RFC1035. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AnnouncePublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AnnouncePublicAdvertisedPrefixeRequest): + request = compute.AnnouncePublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.announce] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def announce( + self, + request: Optional[ + Union[compute.AnnouncePublicAdvertisedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Announces the specified PublicAdvertisedPrefix + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_announce(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AnnouncePublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Announce. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + The name of the public advertised + prefix. It should comply with RFC1035. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AnnouncePublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AnnouncePublicAdvertisedPrefixeRequest): + request = compute.AnnouncePublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.announce] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def delete_unary( self, request: Optional[ @@ -1477,6 +1739,268 @@ def error_code(self): # Done; return the response. 
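The new ``announce``/``announce_unary`` pair above follows the usual compute_v1 split: the unary variant returns the raw ``compute.Operation``, while ``announce`` wraps it in an ``ExtendedOperation``. A minimal usage sketch based on the generated snippet in the docstring (the project and prefix values are placeholders):

.. code-block:: python

    from google.cloud import compute_v1

    def announce_prefix(project: str, prefix_name: str) -> None:
        client = compute_v1.PublicAdvertisedPrefixesClient()
        # Flattened arguments; an AnnouncePublicAdvertisedPrefixeRequest can be
        # passed as request= instead, but not both at once.
        operation = client.announce(
            project=project,
            public_advertised_prefix=prefix_name,
        )
        # The ExtendedOperation behaves like a future: result() blocks until
        # the operation completes and raises if it reports an error.
        operation.result()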
return response + def withdraw_unary( + self, + request: Optional[ + Union[compute.WithdrawPublicAdvertisedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Withdraws the specified PublicAdvertisedPrefix + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_withdraw(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WithdrawPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Withdraw. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + The name of the public advertised + prefix. It should comply with RFC1035. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WithdrawPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.WithdrawPublicAdvertisedPrefixeRequest): + request = compute.WithdrawPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.withdraw] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def withdraw( + self, + request: Optional[ + Union[compute.WithdrawPublicAdvertisedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Withdraws the specified PublicAdvertisedPrefix + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_withdraw(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WithdrawPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Withdraw. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + The name of the public advertised + prefix. It should comply with RFC1035. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WithdrawPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.WithdrawPublicAdvertisedPrefixeRequest): + request = compute.WithdrawPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.withdraw] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def __enter__(self) -> "PublicAdvertisedPrefixesClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py index ecd20f00ecf4..27795407d4a0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py @@ -127,6 +127,11 @@ def __init__( def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
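``withdraw_unary`` above returns the raw ``compute.Operation`` for callers to poll themselves, while ``withdraw`` polls internally through the transport's global operations client, as the ``GetGlobalOperationRequest`` it builds shows. A hedged sketch of the manual route, assuming ``GlobalOperationsClient`` and its flattened ``wait`` arguments from the compute_v1 surface:

.. code-block:: python

    from google.cloud import compute_v1

    def withdraw_prefix_unary(project: str, prefix_name: str) -> compute_v1.Operation:
        client = compute_v1.PublicAdvertisedPrefixesClient()
        operation = client.withdraw_unary(
            project=project,
            public_advertised_prefix=prefix_name,
        )
        # PublicAdvertisedPrefix is a global resource, so the raw Operation is
        # polled through the globalOperations resource rather than
        # regionOperations or zoneOperations.
        ops = compute_v1.GlobalOperationsClient()
        return ops.wait(project=project, operation=operation.name)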
self._wrapped_methods = { + self.announce: gapic_v1.method.wrap_method( + self.announce, + default_timeout=None, + client_info=client_info, + ), self.delete: gapic_v1.method.wrap_method( self.delete, default_timeout=None, @@ -152,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.withdraw: gapic_v1.method.wrap_method( + self.withdraw, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -163,6 +173,15 @@ def close(self): """ raise NotImplementedError() + @property + def announce( + self, + ) -> Callable[ + [compute.AnnouncePublicAdvertisedPrefixeRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def delete( self, @@ -213,6 +232,15 @@ def patch( ]: raise NotImplementedError() + @property + def withdraw( + self, + ) -> Callable[ + [compute.WithdrawPublicAdvertisedPrefixeRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py index 3685fcc1bd14..7a10a5570ab3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py @@ -63,6 +63,14 @@ class PublicAdvertisedPrefixesRestInterceptor: .. code-block:: python class MyCustomPublicAdvertisedPrefixesInterceptor(PublicAdvertisedPrefixesRestInterceptor): + def pre_announce(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_announce(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -103,12 +111,43 @@ def post_patch(self, response): logging.log(f"Received response: {response}") return response + def pre_withdraw(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_withdraw(self, response): + logging.log(f"Received response: {response}") + return response + transport = PublicAdvertisedPrefixesRestTransport(interceptor=MyCustomPublicAdvertisedPrefixesInterceptor()) client = PublicAdvertisedPrefixesClient(transport=transport) """ + def pre_announce( + self, + request: compute.AnnouncePublicAdvertisedPrefixeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.AnnouncePublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for announce + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_announce(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for announce + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. 
+ """ + return response + def pre_delete( self, request: compute.DeletePublicAdvertisedPrefixeRequest, @@ -218,6 +257,29 @@ def post_patch(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_withdraw( + self, + request: compute.WithdrawPublicAdvertisedPrefixeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.WithdrawPublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for withdraw + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_withdraw(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for withdraw + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class PublicAdvertisedPrefixesRestStub: @@ -322,6 +384,107 @@ def __init__( self._interceptor = interceptor or PublicAdvertisedPrefixesRestInterceptor() self._prep_wrapped_messages(client_info) + class _Announce(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("Announce") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.AnnouncePublicAdvertisedPrefixeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the announce method over HTTP. + + Args: + request (~.compute.AnnouncePublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Announce. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/announce", + }, + ] + request, metadata = self._interceptor.pre_announce(request, metadata) + pb_request = compute.AnnouncePublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_announce(resp) + return resp + class _Delete(PublicAdvertisedPrefixesRestStub): def __hash__(self): return hash("Delete") @@ -372,7 +535,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -565,7 +728,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -761,7 +924,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -821,6 +984,115 @@ def __call__( resp = self._interceptor.post_patch(resp) return resp + class _Withdraw(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("Withdraw") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.WithdrawPublicAdvertisedPrefixeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the withdraw method over HTTP. + + Args: + request (~.compute.WithdrawPublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Withdraw. See + the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/withdraw", + }, + ] + request, metadata = self._interceptor.pre_withdraw(request, metadata) + pb_request = compute.WithdrawPublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_withdraw(resp) + return resp + + @property + def announce( + self, + ) -> Callable[[compute.AnnouncePublicAdvertisedPrefixeRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Announce(self._session, self._host, self._interceptor) # type: ignore + @property def delete( self, @@ -866,6 +1138,14 @@ def patch( # In C++ this would require a dynamic_cast return self._Patch(self._session, self._host, self._interceptor) # type: ignore + @property + def withdraw( + self, + ) -> Callable[[compute.WithdrawPublicAdvertisedPrefixeRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
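[Editor's note] The ``pre_withdraw``/``post_withdraw`` hooks added here can be exercised by subclassing the interceptor and handing it to the REST transport, as the class docstring sketches. A slightly fuller, hedged example (logger name and import path are assumptions based on the file layout in this patch):

.. code-block:: python

    import logging

    from google.cloud import compute_v1
    from google.cloud.compute_v1.services.public_advertised_prefixes.transports.rest import (
        PublicAdvertisedPrefixesRestInterceptor,
        PublicAdvertisedPrefixesRestTransport,
    )

    logger = logging.getLogger("public-advertised-prefixes-audit")

    class AuditingInterceptor(PublicAdvertisedPrefixesRestInterceptor):
        def pre_withdraw(self, request, metadata):
            logger.info("Withdrawing prefix %s", request.public_advertised_prefix)
            return request, metadata

        def post_withdraw(self, response):
            logger.info("Withdraw started operation %s", response.name)
            return response

    transport = PublicAdvertisedPrefixesRestTransport(interceptor=AuditingInterceptor())
    client = compute_v1.PublicAdvertisedPrefixesClient(transport=transport)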
+ # In C++ this would require a dynamic_cast + return self._Withdraw(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index 48e08292d317..de021ff1c781 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -542,6 +542,297 @@ def sample_aggregated_list(): # Done; return the response. return response + def announce_unary( + self, + request: Optional[ + Union[compute.AnnouncePublicDelegatedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Announces the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_announce(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AnnouncePublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Announce. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + public delegated prefix is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + The name of the public delegated + prefix. It should comply with RFC1035. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AnnouncePublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AnnouncePublicDelegatedPrefixeRequest): + request = compute.AnnouncePublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.announce] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def announce( + self, + request: Optional[ + Union[compute.AnnouncePublicDelegatedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Announces the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_announce(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AnnouncePublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Announce. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + The name of the region where the + public delegated prefix is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + The name of the public delegated + prefix. It should comply with RFC1035. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AnnouncePublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AnnouncePublicDelegatedPrefixeRequest): + request = compute.AnnouncePublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.announce] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
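[Editor's note] The regional ``announce()`` shown here and the matching ``withdraw()`` added further below follow the same extended-operation pattern, with ``region`` as an additional routing field. A hedged usage sketch (resource names are placeholders):

.. code-block:: python

    from google.cloud import compute_v1

    def cycle_delegated_prefix(project_id: str, region: str, prefix_name: str) -> None:
        client = compute_v1.PublicDelegatedPrefixesClient()

        # Withdraw the prefix from the region and wait for completion.
        client.withdraw(
            project=project_id,
            region=region,
            public_delegated_prefix=prefix_name,
        ).result()

        # Re-announce it afterwards.
        client.announce(
            project=project_id,
            region=region,
            public_delegated_prefix=prefix_name,
        ).result()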
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def delete_unary( self, request: Optional[ @@ -1695,6 +1986,297 @@ def error_code(self): # Done; return the response. return response + def withdraw_unary( + self, + request: Optional[ + Union[compute.WithdrawPublicDelegatedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Withdraws the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_withdraw(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WithdrawPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Withdraw. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + public delegated prefix is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + The name of the public delegated + prefix. It should comply with RFC1035. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WithdrawPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.WithdrawPublicDelegatedPrefixeRequest): + request = compute.WithdrawPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.withdraw] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def withdraw( + self, + request: Optional[ + Union[compute.WithdrawPublicDelegatedPrefixeRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Withdraws the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_withdraw(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WithdrawPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Withdraw. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + public delegated prefix is located. It + should comply with RFC1035. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + The name of the public delegated + prefix. It should comply with RFC1035. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WithdrawPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.WithdrawPublicDelegatedPrefixeRequest): + request = compute.WithdrawPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.withdraw] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "PublicDelegatedPrefixesClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py index e794a5c5a9be..f75d6c7cdfe3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py @@ -132,6 +132,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.announce: gapic_v1.method.wrap_method( + self.announce, + default_timeout=None, + client_info=client_info, + ), self.delete: gapic_v1.method.wrap_method( self.delete, default_timeout=None, @@ -157,6 +162,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.withdraw: gapic_v1.method.wrap_method( + self.withdraw, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -180,6 +190,15 @@ def aggregated_list( ]: raise NotImplementedError() + @property + def announce( + self, + ) -> Callable[ + [compute.AnnouncePublicDelegatedPrefixeRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def delete( self, @@ -228,6 +247,15 @@ def patch( ]: raise NotImplementedError() + @property + def withdraw( + self, + ) -> Callable[ + [compute.WithdrawPublicDelegatedPrefixeRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py index 320c861fe8b8..e63061ed66c6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py @@ -71,6 +71,14 @@ def post_aggregated_list(self, response): logging.log(f"Received response: {response}") return response + def pre_announce(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_announce(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -111,6 +119,14 @@ def post_patch(self, response): logging.log(f"Received response: {response}") return response + def pre_withdraw(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_withdraw(self, response): + logging.log(f"Received response: {response}") + return response + transport = PublicDelegatedPrefixesRestTransport(interceptor=MyCustomPublicDelegatedPrefixesInterceptor()) client = PublicDelegatedPrefixesClient(transport=transport) @@ -142,6 +158,29 @@ def post_aggregated_list( """ return response + def pre_announce( + self, + request: compute.AnnouncePublicDelegatedPrefixeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.AnnouncePublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor 
for announce + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_announce(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for announce + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_delete( self, request: compute.DeletePublicDelegatedPrefixeRequest, @@ -251,6 +290,29 @@ def post_patch(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_withdraw( + self, + request: compute.WithdrawPublicDelegatedPrefixeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.WithdrawPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for withdraw + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_withdraw(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for withdraw + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class PublicDelegatedPrefixesRestStub: @@ -443,6 +505,107 @@ def __call__( resp = self._interceptor.post_aggregated_list(resp) return resp + class _Announce(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Announce") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.AnnouncePublicDelegatedPrefixeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the announce method over HTTP. + + Args: + request (~.compute.AnnouncePublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Announce. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/announce", + }, + ] + request, metadata = self._interceptor.pre_announce(request, metadata) + pb_request = compute.AnnouncePublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_announce(resp) + return resp + class _Delete(PublicDelegatedPrefixesRestStub): def __hash__(self): return hash("Delete") @@ -493,7 +656,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -689,7 +852,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -885,7 +1048,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -945,6 +1108,107 @@ def __call__( resp = self._interceptor.post_patch(resp) return resp + class _Withdraw(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Withdraw") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.WithdrawPublicDelegatedPrefixeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the withdraw method over HTTP. + + Args: + request (~.compute.WithdrawPublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Withdraw. See + the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/withdraw", + }, + ] + request, metadata = self._interceptor.pre_withdraw(request, metadata) + pb_request = compute.WithdrawPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_withdraw(resp) + return resp + @property def aggregated_list( self, @@ -956,6 +1220,14 @@ def aggregated_list( # In C++ this would require a dynamic_cast return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + @property + def announce( + self, + ) -> Callable[[compute.AnnouncePublicDelegatedPrefixeRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Announce(self._session, self._host, self._interceptor) # type: ignore + @property def delete( self, @@ -1000,6 +1272,14 @@ def patch( # In C++ this would require a dynamic_cast return self._Patch(self._session, self._host, self._interceptor) # type: ignore + @property + def withdraw( + self, + ) -> Callable[[compute.WithdrawPublicDelegatedPrefixeRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
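[Editor's note] Because the new methods are wrapped with ``default_timeout=None``, callers who want retries or a deadline supply them per call. A hedged sketch using ``google.api_core`` retry objects (values are illustrative; retrying a mutating call such as withdraw is only appropriate if repeating it is safe):

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import compute_v1

    client = compute_v1.PublicDelegatedPrefixesClient()

    operation = client.withdraw(
        project="my-project",
        region="us-central1",
        public_delegated_prefix="my-delegated-prefix",
        # Retry only transient errors, with exponential backoff.
        retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0),
        timeout=120.0,
    )
    operation.result()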
+ # In C++ this would require a dynamic_cast + return self._Withdraw(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py index b77be7d5d0c3..b9cfa0d333fb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py @@ -399,7 +399,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -598,7 +598,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -794,7 +794,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -904,7 +904,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 61a5179c4430..2d08fb5d90ad 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -1091,29 +1091,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1586,6 +1568,140 @@ def sample_list(): # Done; return the response. return response + def list_usable( + self, + request: Optional[ + Union[compute.ListUsableRegionBackendServicesRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsablePager: + r"""Retrieves an aggregated list of all usable backend + services in the specified project in the given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_usable(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableRegionBackendServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListUsableRegionBackendServicesRequest, dict]): + The request object. A request message for + RegionBackendServices.ListUsable. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. It must be a string that meets + the requirements in RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_backend_services.pagers.ListUsablePager: + Contains a list of usable + BackendService resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListUsableRegionBackendServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListUsableRegionBackendServicesRequest): + request = compute.ListUsableRegionBackendServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsablePager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def patch_unary( self, request: Optional[Union[compute.PatchRegionBackendServiceRequest, dict]] = None, @@ -1997,29 +2113,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -2081,6 +2179,477 @@ def sample_set_iam_policy(): # Done; return the response. return response + def set_security_policy_unary( + self, + request: Optional[ + Union[compute.SetSecurityPolicyRegionBackendServiceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Google Cloud Armor security policy for the + specified backend service. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
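[Editor's note] The new ``list_usable`` method above returns a ``ListUsablePager``, so iterating the result fetches further pages transparently. A minimal sketch (field access assumes the standard ``BackendService`` proto fields):

.. code-block:: python

    from google.cloud import compute_v1

    def print_usable_backend_services(project_id: str, region: str) -> None:
        client = compute_v1.RegionBackendServicesClient()
        # The pager issues further ListUsable requests as iteration crosses
        # page boundaries.
        for backend_service in client.list_usable(project=project_id, region=region):
            print(backend_service.name, backend_service.self_link)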
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.SetSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, backend_service, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, compute.SetSecurityPolicyRegionBackendServiceRequest + ): + request = compute.SetSecurityPolicyRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
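+ # Each flattened argument below simply overwrites the matching field on
+ # the request; ``security_policy_reference_resource`` ultimately becomes
+ # the JSON body of the POST to .../setSecurityPolicy (see the REST
+ # transport's http_options for this method).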
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_security_policy( + self, + request: Optional[ + Union[compute.SetSecurityPolicyRegionBackendServiceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Google Cloud Armor security policy for the + specified backend service. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.SetSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, backend_service, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, compute.SetSecurityPolicyRegionBackendServiceRequest + ): + request = compute.SetSecurityPolicyRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
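+ # The wrapper constructed below behaves like a polling future: calling
+ # ``result()`` on the returned object refreshes the operation via
+ # ``get_operation`` until it completes (or raises on error), while
+ # ``cancel_operation`` is intentionally a no-op because cancel is not
+ # yet supported for these extended operations.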
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsRegionBackendServiceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionBackendServiceRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [project, region, resource, test_permissions_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, compute.TestIamPermissionsRegionBackendServiceRequest + ): + request = compute.TestIamPermissionsRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def update_unary( self, request: Optional[ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/pagers.py index 2191092cc864..cde7e8fda2ba 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/pagers.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/pagers.py @@ -87,3 +87,65 @@ def __iter__(self) -> Iterator[compute.BackendService]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsablePager: + """A pager for iterating through ``list_usable`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendServiceListUsable` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsable`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendServiceListUsable` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.BackendServiceListUsable], + request: compute.ListUsableRegionBackendServicesRequest, + response: compute.BackendServiceListUsable, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListUsableRegionBackendServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendServiceListUsable): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListUsableRegionBackendServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendServiceListUsable]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendService]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/base.py index ee7cf96cbb65..63adc4fbccc9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/base.py @@ -157,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_usable: gapic_v1.method.wrap_method( + self.list_usable, + default_timeout=None, + client_info=client_info, + ), self.patch: gapic_v1.method.wrap_method( self.patch, default_timeout=None, @@ -167,6 +172,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_security_policy: gapic_v1.method.wrap_method( + self.set_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), self.update: gapic_v1.method.wrap_method( self.update, default_timeout=None, @@ -240,6 +255,18 @@ def list( ]: raise NotImplementedError() + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableRegionBackendServicesRequest], + Union[ + compute.BackendServiceListUsable, + Awaitable[compute.BackendServiceListUsable], + ], + ]: + raise NotImplementedError() + @property def patch( self, @@ -258,6 +285,26 @@ def set_iam_policy( ]: raise NotImplementedError() + @property + def set_security_policy( + self, + ) -> Callable[ + [compute.SetSecurityPolicyRegionBackendServiceRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionBackendServiceRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py index d6325de5358e..46196bf8e781 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py @@ -111,6 +111,14 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_list_usable(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable(self, response): + logging.log(f"Received response: {response}") + return response + def pre_patch(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +135,22 @@ def post_set_iam_policy(self, response): logging.log(f"Received response: {response}") return response + def pre_set_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -273,6 +297,31 @@ def post_list( """ return response + def pre_list_usable( + self, + request: compute.ListUsableRegionBackendServicesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.ListUsableRegionBackendServicesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_usable + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_list_usable( + self, response: compute.BackendServiceListUsable + ) -> compute.BackendServiceListUsable: + """Post-rpc interceptor for list_usable + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_patch( self, request: compute.PatchRegionBackendServiceRequest, @@ -317,6 +366,56 @@ def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """ return response + def pre_set_security_policy( + self, + request: compute.SetSecurityPolicyRegionBackendServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.SetSecurityPolicyRegionBackendServiceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_set_security_policy( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. 
+ """ + return response + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsRegionBackendServiceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.TestIamPermissionsRegionBackendServiceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_update( self, request: compute.UpdateRegionBackendServiceRequest, @@ -492,7 +591,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -793,29 +892,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + 
For a description of IAM and its features, see the `IAM documentation `__. """ @@ -916,7 +997,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1064,6 +1145,94 @@ def __call__( resp = self._interceptor.post_list(resp) return resp + class _ListUsable(RegionBackendServicesRestStub): + def __hash__(self): + return hash("ListUsable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListUsableRegionBackendServicesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.BackendServiceListUsable: + r"""Call the list usable method over HTTP. + + Args: + request (~.compute.ListUsableRegionBackendServicesRequest): + The request object. A request message for + RegionBackendServices.ListUsable. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendServiceListUsable: + Contains a list of usable + BackendService resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/listUsable", + }, + ] + request, metadata = self._interceptor.pre_list_usable(request, metadata) + pb_request = compute.ListUsableRegionBackendServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceListUsable() + pb_resp = compute.BackendServiceListUsable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable(resp) + return resp + class _Patch(RegionBackendServicesRestStub): def __hash__(self): return hash("Patch") @@ -1114,7 +1283,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1228,29 +1397,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1310,6 +1461,219 @@ def __call__( resp = self._interceptor.post_set_iam_policy(resp) return resp + class _SetSecurityPolicy(RegionBackendServicesRestStub): + def __hash__(self): + return hash("SetSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetSecurityPolicyRegionBackendServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the set security policy method over HTTP. + + Args: + request (~.compute.SetSecurityPolicyRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.SetSecurityPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/setSecurityPolicy", + "body": "security_policy_reference_resource", + }, + ] + request, metadata = self._interceptor.pre_set_security_policy( + request, metadata + ) + pb_request = compute.SetSecurityPolicyRegionBackendServiceRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_security_policy(resp) + return resp + + class _TestIamPermissions(RegionBackendServicesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.TestIamPermissionsRegionBackendServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = compute.TestIamPermissionsRegionBackendServiceRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + class _Update(RegionBackendServicesRestStub): def __hash__(self): return hash("Update") @@ -1360,7 +1724,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1473,6 +1837,17 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableRegionBackendServicesRequest], + compute.BackendServiceListUsable, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsable(self._session, self._host, self._interceptor) # type: ignore + @property def patch( self, @@ -1489,6 +1864,27 @@ def set_iam_policy( # In C++ this would require a dynamic_cast return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + @property + def set_security_policy( + self, + ) -> Callable[ + [compute.SetSecurityPolicyRegionBackendServiceRequest], compute.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
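+ # Note: each property access builds a fresh stub instance; the stub keeps
+ # no state of its own beyond the shared session, host and interceptor
+ # passed in here.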
+ # In C++ this would require a dynamic_cast + return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionBackendServiceRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property def update( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py index 7c7a451dee69..f0ce7ec5b75f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py @@ -553,7 +553,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -751,7 +751,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index 60ddf388d136..f947668f3675 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -1840,29 +1840,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -3042,29 +3024,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py index b2edd9638e81..5f9f526493ad 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py @@ -734,7 +734,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -846,7 +846,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -956,7 +956,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1066,7 +1066,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1269,29 +1269,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1392,7 +1374,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1588,7 +1570,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1700,7 +1682,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1814,29 +1796,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1946,7 +1910,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2056,7 +2020,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -2168,7 +2132,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2272,7 +2236,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2481,7 +2445,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py index 4811bb6504b6..f1ce678ef39e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py @@ -376,7 +376,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -565,7 +565,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -761,7 +761,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index 5bf9fc4aa358..42fd27768fcb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -779,25 +779,30 @@ def sample_get(): Returns: google.cloud.compute_v1.types.HealthCheck: - Represents a Health Check resource. Google Compute - Engine has two Health Check resources: \* - [Global](/compute/docs/reference/rest/v1/healthChecks) - \* + Represents a health check resource. Google Compute + Engine has two health check resources: \* [Regional](/compute/docs/reference/rest/v1/regionHealthChecks) - Internal HTTP(S) load balancers must use regional health - checks (compute.v1.regionHealthChecks). 
Traffic Director - must use global health checks (compute.v1.healthChecks). - Internal TCP/UDP load balancers can use either regional - or global health checks (compute.v1.regionHealthChecks - or compute.v1.healthChecks). External HTTP(S), TCP - proxy, and SSL proxy load balancers as well as managed - instance group auto-healing must use global health - checks (compute.v1.healthChecks). Backend service-based - network load balancers must use regional health checks - (compute.v1.regionHealthChecks). Target pool-based - network load balancers must use legacy HTTP health - checks (compute.v1.httpHealthChecks). For more - information, see Health checks overview. + \* + [Global](/compute/docs/reference/rest/v1/healthChecks) + These health check resources can be used for load + balancing and for autohealing VMs in a managed instance + group (MIG). **Load balancing** The following load + balancer can use either regional or global health check: + \* Internal TCP/UDP load balancer The following load + balancers require regional health check: \* Internal + HTTP(S) load balancer \* Backend service-based network + load balancer Traffic Director and the following load + balancers require global health check: \* External + HTTP(S) load balancer \* TCP proxy load balancer \* SSL + proxy load balancer The following load balancer require + [legacy HTTP health + checks](/compute/docs/reference/rest/v1/httpHealthChecks): + \* Target pool-based network load balancer **Autohealing + in MIGs** The health checks that you use for autohealing + VMs in a MIG can be either regional or global. For more + information, see Set up an application health check and + autohealing. For more information, see Health checks + overview. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py index 1125bb13817d..829c586536e6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py @@ -397,7 +397,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -485,26 +485,30 @@ def __call__( Returns: ~.compute.HealthCheck: - Represents a Health Check resource. Google Compute - Engine has two Health Check resources: \* - `Global `__ - \* + Represents a health check resource. Google Compute + Engine has two health check resources: \* `Regional `__ - Internal HTTP(S) load balancers must use regional health - checks (``compute.v1.regionHealthChecks``). Traffic - Director must use global health checks - (``compute.v1.healthChecks``). Internal TCP/UDP load - balancers can use either regional or global health - checks (``compute.v1.regionHealthChecks`` or - ``compute.v1.healthChecks``). External HTTP(S), TCP - proxy, and SSL proxy load balancers as well as managed - instance group auto-healing must use global health - checks (``compute.v1.healthChecks``). Backend - service-based network load balancers must use regional - health checks (``compute.v1.regionHealthChecks``). 
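The replacement ``HealthCheck`` docstring in this hunk groups the requirements by load balancer: regional health checks for internal HTTP(S) and backend service-based network load balancers, global health checks for Traffic Director and the external HTTP(S), TCP proxy, and SSL proxy load balancers, and either scope for internal TCP/UDP load balancing and MIG autohealing. A minimal sketch of the regional case follows; the client and field names come from the standard generated surface, and all values are placeholders, not part of this patch.

.. code-block:: python

    from google.cloud import compute_v1


    def create_regional_http_health_check(
        project: str, region: str, name: str
    ) -> compute_v1.HealthCheck:
        health_check = compute_v1.HealthCheck(
            name=name,
            type_="HTTP",  # generated attribute name for the "type" field
            http_health_check=compute_v1.HTTPHealthCheck(port=80, request_path="/healthz"),
            check_interval_sec=5,
            timeout_sec=5,
        )
        client = compute_v1.RegionHealthChecksClient()
        # insert() returns an ExtendedOperation; block until it completes.
        operation = client.insert(
            project=project, region=region, health_check_resource=health_check
        )
        operation.result(timeout=300)
        return client.get(project=project, region=region, health_check=name)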
- Target pool-based network load balancers must use legacy - HTTP health checks (``compute.v1.httpHealthChecks``). - For more information, see Health checks overview. + \* + `Global `__ + These health check resources can be used for load + balancing and for autohealing VMs in a managed instance + group (MIG). **Load balancing** The following load + balancer can use either regional or global health check: + \* Internal TCP/UDP load balancer The following load + balancers require regional health check: \* Internal + HTTP(S) load balancer \* Backend service-based network + load balancer Traffic Director and the following load + balancers require global health check: \* External + HTTP(S) load balancer \* TCP proxy load balancer \* SSL + proxy load balancer The following load balancer require + `legacy HTTP health + checks `__: + \* Target pool-based network load balancer **Autohealing + in MIGs** The health checks that you use for autohealing + VMs in a MIG can be either regional or global. For more + information, see Set up an application health check and + autohealing. For more information, see Health checks + overview. """ @@ -604,7 +608,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -802,7 +806,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -912,7 +916,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py index 1e4d514b2782..613d24b7381d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py @@ -842,7 +842,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -957,7 +957,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1073,7 +1073,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1187,7 +1187,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1288,7 +1288,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1403,7 +1403,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1614,7 +1614,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2086,7 +2086,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2197,7 +2197,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2313,7 +2313,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2429,7 +2429,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2530,7 +2530,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2644,7 +2644,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2759,7 +2759,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py index c4f25d1f1660..e4389b2ec98b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py @@ -630,7 +630,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 41f2b491bbf4..7a77c401cbe4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -783,11 +783,19 @@ def sample_get(): Returns: google.cloud.compute_v1.types.InstanceTemplate: - Represents an Instance Template - resource. You can use instance templates - to create VM instances and managed - instance groups. For more information, - read Instance Templates. + Represents an Instance Template resource. Google Compute + Engine has two Instance Template resources: \* + [Global](/compute/docs/reference/rest/v1/instanceTemplates) + \* + [Regional](/compute/docs/reference/rest/v1/regionInstanceTemplates) + You can reuse a global instance template in different + regions whereas you can use a regional instance template + in a specified region only. If you want to reduce + cross-region dependency or achieve data residency, use a + regional instance template. To create VMs, managed + instance groups, and reservations, you can use either + global or regional instance templates. For more + information, read Instance Templates. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py index 5337ee787fed..4e8f4e3b8436 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py @@ -341,7 +341,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -429,11 +429,19 @@ def __call__( Returns: ~.compute.InstanceTemplate: - Represents an Instance Template - resource. You can use instance templates - to create VM instances and managed - instance groups. For more information, - read Instance Templates. + Represents an Instance Template resource. Google Compute + Engine has two Instance Template resources: \* + `Global `__ + \* + `Regional `__ + You can reuse a global instance template in different + regions whereas you can use a regional instance template + in a specified region only. If you want to reduce + cross-region dependency or achieve data residency, use a + regional instance template. To create VMs, managed + instance groups, and reservations, you can use either + global or regional instance templates. For more + information, read Instance Templates. """ @@ -533,7 +541,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py index 8bb8285aaedc..e58c9d602359 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py @@ -252,7 +252,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index af9da3c6d52c..c50b9707a44b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -425,22 +425,666 @@ def __init__( api_audience=client_options.api_audience, ) + def attach_network_endpoints_unary( + self, + request: Optional[ + Union[compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + region_network_endpoint_groups_attach_endpoints_request_resource: Optional[ + compute.RegionNetworkEndpointGroupsAttachEndpointsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Attach a list of network endpoints to the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where you want + to create the network endpoint group. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``region_network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + region, + network_endpoint_group, + region_network_endpoint_groups_attach_endpoints_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest + ): + request = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if ( + region_network_endpoint_groups_attach_endpoints_request_resource + is not None + ): + request.region_network_endpoint_groups_attach_endpoints_request_resource = ( + region_network_endpoint_groups_attach_endpoints_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def attach_network_endpoints( + self, + request: Optional[ + Union[compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + region_network_endpoint_groups_attach_endpoints_request_resource: Optional[ + compute.RegionNetworkEndpointGroupsAttachEndpointsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Attach a list of network endpoints to the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + The name of the region where you want + to create the network endpoint group. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``region_network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + region, + network_endpoint_group, + region_network_endpoint_groups_attach_endpoints_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest + ): + request = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if ( + region_network_endpoint_groups_attach_endpoints_request_resource + is not None + ): + request.region_network_endpoint_groups_attach_endpoints_request_resource = ( + region_network_endpoint_groups_attach_endpoints_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + ) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def delete_unary( self, request: Optional[ - Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict] + Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network endpoint group. Note + that the NEG cannot be deleted if it is configured as a + backend of a backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. 
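``attach_network_endpoints`` (added above) and the other new mutating methods come in two flavors: the ``*_unary`` variant returns the raw ``compute.Operation``, while the plain variant wraps it in an ``ExtendedOperation`` that polls ``regionOperations`` on the caller's behalf. A hedged usage sketch with the flattened arguments follows; the endpoint message fields (``network_endpoints``, ``ip_address``, ``port``) follow the standard Compute NEG surface and the values are placeholders.

.. code-block:: python

    from google.cloud import compute_v1


    def attach_endpoint(
        project: str, region: str, neg_name: str, ip_address: str, port: int
    ) -> None:
        client = compute_v1.RegionNetworkEndpointGroupsClient()
        endpoints_request = compute_v1.RegionNetworkEndpointGroupsAttachEndpointsRequest(
            network_endpoints=[compute_v1.NetworkEndpoint(ip_address=ip_address, port=port)]
        )
        operation = client.attach_network_endpoints(
            project=project,
            region=region,
            network_endpoint_group=neg_name,
            region_network_endpoint_groups_attach_endpoints_request_resource=endpoints_request,
        )
        # ExtendedOperation: block until the regional operation is DONE.
        operation.result(timeout=300)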
+ + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): + request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete( + self, + request: Optional[ + Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network endpoint group. Note + that the NEG cannot be deleted if it is configured as a + backend of a backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): + request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + ) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def detach_network_endpoints_unary( + self, + request: Optional[ + Union[compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, network_endpoint_group: Optional[str] = None, + region_network_endpoint_groups_detach_endpoints_request_resource: Optional[ + compute.RegionNetworkEndpointGroupsDetachEndpointsRequest + ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: - r"""Deletes the specified network endpoint group. Note - that the NEG cannot be deleted if it is configured as a - backend of a backend service. + r"""Detach the network endpoint from the specified + network endpoint group. .. code-block:: python @@ -453,28 +1097,28 @@ def delete_unary( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_delete(): + def sample_detach_network_endpoints(): # Create a client client = compute_v1.RegionNetworkEndpointGroupsClient() # Initialize request argument(s) - request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + request = compute_v1.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest( network_endpoint_group="network_endpoint_group_value", project="project_value", region="region_value", ) # Make the request - response = client.delete(request=request) + response = client.detach_network_endpoints(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict]): The request object. A request message for - RegionNetworkEndpointGroups.Delete. See - the method description for details. + RegionNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. project (str): Project ID for this request. This corresponds to the ``project`` field @@ -490,12 +1134,18 @@ def sample_delete(): should not be set. network_endpoint_group (str): The name of the network endpoint - group to delete. It should comply with + group you are detaching network + endpoints from. It should comply with RFC1035. 
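When the wrapped operation shown above finishes with an error, ``result()`` raises and the ``_CustomOperation`` properties expose the HTTP error fields under the generic ``error_code``/``error_message`` names. A hedged sketch of reporting a failed delete (the exception type and the placeholder values are assumptions):

.. code-block:: python

    from google.api_core.exceptions import GoogleAPICallError
    from google.cloud import compute_v1


    def delete_neg(project: str, region: str, neg_name: str) -> None:
        client = compute_v1.RegionNetworkEndpointGroupsClient()
        operation = client.delete(
            project=project, region=region, network_endpoint_group=neg_name
        )
        try:
            operation.result(timeout=300)
        except GoogleAPICallError:
            # Populated from http_error_status_code / http_error_message
            # by the _CustomOperation mapping shown above.
            print(operation.error_code, operation.error_message)
            raise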
This corresponds to the ``network_endpoint_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + region_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``region_network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -511,7 +1161,14 @@ def sample_delete(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region, network_endpoint_group]) + has_flattened_params = any( + [ + project, + region, + network_endpoint_group, + region_network_endpoint_groups_detach_endpoints_request_resource, + ] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -519,11 +1176,15 @@ def sample_delete(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # in a compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): - request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + if not isinstance( + request, compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest + ): + request = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -532,10 +1193,17 @@ def sample_delete(): request.region = region if network_endpoint_group is not None: request.network_endpoint_group = network_endpoint_group + if ( + region_network_endpoint_groups_detach_endpoints_request_resource + is not None + ): + request.region_network_endpoint_groups_detach_endpoints_request_resource = ( + region_network_endpoint_groups_detach_endpoints_request_resource + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete] + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] # Certain fields should be provided within the metadata header; # add these here. @@ -560,22 +1228,24 @@ def sample_delete(): # Done; return the response. return response - def delete( + def detach_network_endpoints( self, request: Optional[ - Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict] + Union[compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, network_endpoint_group: Optional[str] = None, + region_network_endpoint_groups_detach_endpoints_request_resource: Optional[ + compute.RegionNetworkEndpointGroupsDetachEndpointsRequest + ] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> extended_operation.ExtendedOperation: - r"""Deletes the specified network endpoint group. 
Note - that the NEG cannot be deleted if it is configured as a - backend of a backend service. + r"""Detach the network endpoint from the specified + network endpoint group. .. code-block:: python @@ -588,28 +1258,28 @@ def delete( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_delete(): + def sample_detach_network_endpoints(): # Create a client client = compute_v1.RegionNetworkEndpointGroupsClient() # Initialize request argument(s) - request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + request = compute_v1.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest( network_endpoint_group="network_endpoint_group_value", project="project_value", region="region_value", ) # Make the request - response = client.delete(request=request) + response = client.detach_network_endpoints(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict]): The request object. A request message for - RegionNetworkEndpointGroups.Delete. See - the method description for details. + RegionNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. project (str): Project ID for this request. This corresponds to the ``project`` field @@ -625,12 +1295,18 @@ def sample_delete(): should not be set. network_endpoint_group (str): The name of the network endpoint - group to delete. It should comply with + group you are detaching network + endpoints from. It should comply with RFC1035. This corresponds to the ``network_endpoint_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + region_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``region_network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -646,7 +1322,14 @@ def sample_delete(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region, network_endpoint_group]) + has_flattened_params = any( + [ + project, + region, + network_endpoint_group, + region_network_endpoint_groups_detach_endpoints_request_resource, + ] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -654,11 +1337,15 @@ def sample_delete(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # in a compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): - request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + if not isinstance( + request, compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest + ): + request = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest( + request + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -667,10 +1354,17 @@ def sample_delete(): request.region = region if network_endpoint_group is not None: request.network_endpoint_group = network_endpoint_group + if ( + region_network_endpoint_groups_detach_endpoints_request_resource + is not None + ): + request.region_network_endpoint_groups_detach_endpoints_request_resource = ( + region_network_endpoint_groups_detach_endpoints_request_resource + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete] + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] # Certain fields should be provided within the metadata header; # add these here. @@ -801,12 +1495,9 @@ def sample_get(): (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ # Create or coerce a protobuf request object. @@ -1283,6 +1974,156 @@ def sample_list(): # Done; return the response. return response + def list_network_endpoints( + self, + request: Optional[ + Union[compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkEndpointsPager: + r"""Lists the network endpoints in the specified network + endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.ListNetworkEndpoints. 
+ See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group from which you want to generate a + list of included network endpoints. It + should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListNetworkEndpointsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest + ): + request = compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNetworkEndpointsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "RegionNetworkEndpointGroupsClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py index b58e0503909d..52819abb2d6a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py @@ -87,3 +87,67 @@ def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNetworkEndpointsPager: + """A pager for iterating through ``list_network_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNetworkEndpoints`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.NetworkEndpointGroupsListNetworkEndpoints], + request: compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, + response: compute.NetworkEndpointGroupsListNetworkEndpoints, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
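Because only the most recent response is retained, attribute lookups on the pager reflect the last page fetched; iteration, however, walks every page via ``next_page_token``. A hedged sketch of consuming the new ``list_network_endpoints`` pager (placeholder values; the ``network_endpoint`` attribute on ``NetworkEndpointWithHealthStatus`` is assumed to be the standard field name):

.. code-block:: python

    from google.cloud import compute_v1


    def print_endpoints(project: str, region: str, neg_name: str) -> None:
        client = compute_v1.RegionNetworkEndpointGroupsClient()
        pager = client.list_network_endpoints(
            project=project, region=region, network_endpoint_group=neg_name
        )
        # Iterating the pager yields NetworkEndpointWithHealthStatus items and
        # follows next_page_token transparently; use `pager.pages` if you need
        # the raw per-page responses instead.
        for endpoint_with_health in pager:
            print(endpoint_with_health.network_endpoint.ip_address)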
+ """ + self._method = method + self._request = compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEndpointGroupsListNetworkEndpoints]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NetworkEndpointWithHealthStatus]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py index 862cd596f6df..b011cb11de9c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py @@ -127,11 +127,21 @@ def __init__( def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { + self.attach_network_endpoints: gapic_v1.method.wrap_method( + self.attach_network_endpoints, + default_timeout=None, + client_info=client_info, + ), self.delete: gapic_v1.method.wrap_method( self.delete, default_timeout=None, client_info=client_info, ), + self.detach_network_endpoints: gapic_v1.method.wrap_method( + self.detach_network_endpoints, + default_timeout=None, + client_info=client_info, + ), self.get: gapic_v1.method.wrap_method( self.get, default_timeout=None, @@ -147,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_network_endpoints: gapic_v1.method.wrap_method( + self.list_network_endpoints, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -158,6 +173,15 @@ def close(self): """ raise NotImplementedError() + @property + def attach_network_endpoints( + self, + ) -> Callable[ + [compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def delete( self, @@ -167,6 +191,15 @@ def delete( ]: raise NotImplementedError() + @property + def detach_network_endpoints( + self, + ) -> Callable[ + [compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def get( self, @@ -197,6 +230,18 @@ def list( ]: raise NotImplementedError() + @property + def list_network_endpoints( + self, + ) -> Callable[ + [compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Awaitable[compute.NetworkEndpointGroupsListNetworkEndpoints], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py 
index be1b6794fda2..099fd25ddd06 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py @@ -63,6 +63,14 @@ class RegionNetworkEndpointGroupsRestInterceptor: .. code-block:: python class MyCustomRegionNetworkEndpointGroupsInterceptor(RegionNetworkEndpointGroupsRestInterceptor): + def pre_attach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_attach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -71,6 +79,14 @@ def post_delete(self, response): logging.log(f"Received response: {response}") return response + def pre_detach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -95,12 +111,46 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_list_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + transport = RegionNetworkEndpointGroupsRestTransport(interceptor=MyCustomRegionNetworkEndpointGroupsInterceptor()) client = RegionNetworkEndpointGroupsClient(transport=transport) """ + def pre_attach_network_endpoints( + self, + request: compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. + """ + return request, metadata + + def post_attach_network_endpoints( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_delete( self, request: compute.DeleteRegionNetworkEndpointGroupRequest, @@ -124,6 +174,32 @@ def post_delete(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_detach_network_endpoints( + self, + request: compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. 
+ """ + return request, metadata + + def post_detach_network_endpoints( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_get( self, request: compute.GetRegionNetworkEndpointGroupRequest, @@ -195,6 +271,32 @@ def post_list( """ return response + def pre_list_network_endpoints( + self, + request: compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. + """ + return request, metadata + + def post_list_network_endpoints( + self, response: compute.NetworkEndpointGroupsListNetworkEndpoints + ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class RegionNetworkEndpointGroupsRestStub: @@ -299,6 +401,122 @@ def __init__( self._interceptor = interceptor or RegionNetworkEndpointGroupsRestInterceptor() self._prep_wrapped_messages(client_info) + class _AttachNetworkEndpoints(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("AttachNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the attach network endpoints method over HTTP. + + Args: + request (~.compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest): + The request object. A request message for + RegionNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints", + "body": "region_network_endpoint_groups_attach_endpoints_request_resource", + }, + ] + request, metadata = self._interceptor.pre_attach_network_endpoints( + request, metadata + ) + pb_request = ( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_attach_network_endpoints(resp) + return resp + class _Delete(RegionNetworkEndpointGroupsRestStub): def __hash__(self): return hash("Delete") @@ -349,7 +567,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -400,6 +618,122 @@ def __call__( resp = self._interceptor.post_delete(resp) return resp + class _DetachNetworkEndpoints(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("DetachNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the detach network endpoints method over HTTP. + + Args: + request (~.compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest): + The request object. A request message for + RegionNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints", + "body": "region_network_endpoint_groups_detach_endpoints_request_resource", + }, + ] + request, metadata = self._interceptor.pre_detach_network_endpoints( + request, metadata + ) + pb_request = ( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_network_endpoints(resp) + return resp + class _Get(RegionNetworkEndpointGroupsRestStub): def __hash__(self): return hash("Get") @@ -442,12 +776,9 @@ def __call__( (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, - see Setting up external HTTP(S) Load - Balancing with internet NEGs, Setting up - zonal NEGs, or Setting up external - HTTP(S) Load Balancing with serverless - NEGs. + For more information about using NEGs + for different use cases, see Network + endpoint groups overview. """ @@ -547,7 +878,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
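For the attach/detach stubs above, the HTTP body is taken from the ``region_network_endpoint_groups_attach_endpoints_request_resource`` and ``region_network_endpoint_groups_detach_endpoints_request_resource`` request fields. A hypothetical sketch of attaching a single endpoint through the client surface added in this revision; the payload shape (a ``network_endpoints`` list of ``NetworkEndpoint`` entries) is assumed by analogy with the zonal NEG API, and every resource name is a placeholder:

.. code-block:: python

    from google.cloud import compute_v1

    client = compute_v1.RegionNetworkEndpointGroupsClient()

    # Assumed payload shape, analogous to the zonal NetworkEndpointGroups API:
    # the endpoints to add to the regional NEG.
    attach_body = compute_v1.RegionNetworkEndpointGroupsAttachEndpointsRequest(
        network_endpoints=[
            compute_v1.NetworkEndpoint(ip_address="10.0.0.5", port=8080),
        ]
    )

    request = compute_v1.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest(
        project="my-project",
        region="us-central1",
        network_endpoint_group="my-regional-neg",
        region_network_endpoint_groups_attach_endpoints_request_resource=attach_body,
    )

    operation = client.attach_network_endpoints(request=request)
    operation.result()  # block until the regional Operation completes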
""" @@ -693,6 +1024,109 @@ def __call__( resp = self._interceptor.post_list(resp) return resp + class _ListNetworkEndpoints(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("ListNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + r"""Call the list network endpoints method over HTTP. + + Args: + request (~.compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest): + The request object. A request message for + RegionNetworkEndpointGroups.ListNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkEndpointGroupsListNetworkEndpoints: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints", + }, + ] + request, metadata = self._interceptor.pre_list_network_endpoints( + request, metadata + ) + pb_request = ( + compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroupsListNetworkEndpoints() + pb_resp = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_network_endpoints(resp) + return resp + + @property + def attach_network_endpoints( + self, + ) -> Callable[ + [compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest], + compute.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AttachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + @property def delete( self, @@ -701,6 +1135,17 @@ def delete( # In C++ this would require a dynamic_cast return self._Delete(self._session, self._host, self._interceptor) # type: ignore + @property + def detach_network_endpoints( + self, + ) -> Callable[ + [compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest], + compute.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + @property def get( self, @@ -730,6 +1175,17 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def list_network_endpoints( + self, + ) -> Callable[ + [compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupsListNetworkEndpoints, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index abc3b4f48812..e91879c36cbb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -2115,29 +2115,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -4043,29 +4025,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py index 50ec73f9552e..8c3a2b012ef1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py @@ -772,7 +772,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -884,7 +884,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
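The JSON/YAML policy examples above were only reflowed into literal blocks; the payload is unchanged. As a reading aid, a hedged sketch of fetching such a policy for a regional network firewall policy and walking its bindings (all resource names are placeholders):

.. code-block:: python

    from google.cloud import compute_v1

    client = compute_v1.RegionNetworkFirewallPoliciesClient()

    policy = client.get_iam_policy(
        request=compute_v1.GetIamPolicyRegionNetworkFirewallPolicyRequest(
            project="my-project",
            region="us-central1",
            resource="my-firewall-policy",
        )
    )

    # Bindings follow the structure shown in the JSON example above:
    # each entry pairs a role with its member list (plus an optional condition).
    for binding in policy.bindings:
        print(binding.role, list(binding.members))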
""" @@ -994,7 +994,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1097,7 +1097,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1476,29 +1476,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1691,7 +1673,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1887,7 +1869,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1997,7 +1979,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2107,7 +2089,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2212,7 +2194,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -2319,29 +2301,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < 
timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py index fdbf539d363d..70832fba4a99 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py @@ -349,7 +349,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -543,7 +543,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index 037696d83426..15cff07b9e4c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -634,7 +634,7 @@ def sample_get(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -915,7 +915,7 @@ def sample_wait(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py index d1b2452b92ea..b512b35a92dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py @@ -430,7 +430,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -619,7 +619,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. 
For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 0c306bab2eb0..4de13a906e46 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -423,20 +423,21 @@ def __init__( api_audience=client_options.api_audience, ) - def delete_unary( + def add_rule_unary( self, request: Optional[ - Union[compute.DeleteRegionSecurityPolicyRequest, dict] + Union[compute.AddRuleRegionSecurityPolicyRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: - r"""Deletes the specified policy. + r"""Inserts a rule into a security policy. .. code-block:: python @@ -449,27 +450,27 @@ def delete_unary( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_delete(): + def sample_add_rule(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.DeleteRegionSecurityPolicyRequest( + request = compute_v1.AddRuleRegionSecurityPolicyRequest( project="project_value", region="region_value", security_policy="security_policy_value", ) # Make the request - response = client.delete(request=request) + response = client.add_rule(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.AddRuleRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Delete. See the + RegionSecurityPolicies.AddRule. See the method description for details. project (str): Project ID for this request. @@ -485,11 +486,16 @@ def sample_delete(): should not be set. security_policy (str): Name of the security policy to - delete. + update. This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -505,7 +511,9 @@ def sample_delete(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project, region, security_policy]) + has_flattened_params = any( + [project, region, security_policy, security_policy_rule_resource] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -513,11 +521,11 @@ def sample_delete(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSecurityPolicyRequest. + # in a compute.AddRuleRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): - request = compute.DeleteRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.AddRuleRegionSecurityPolicyRequest): + request = compute.AddRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -526,10 +534,12 @@ def sample_delete(): request.region = region if security_policy is not None: request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete] + rpc = self._transport._wrapped_methods[self._transport.add_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -554,20 +564,21 @@ def sample_delete(): # Done; return the response. return response - def delete( + def add_rule( self, request: Optional[ - Union[compute.DeleteRegionSecurityPolicyRequest, dict] + Union[compute.AddRuleRegionSecurityPolicyRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> extended_operation.ExtendedOperation: - r"""Deletes the specified policy. + r"""Inserts a rule into a security policy. .. code-block:: python @@ -580,27 +591,27 @@ def delete( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_delete(): + def sample_add_rule(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.DeleteRegionSecurityPolicyRequest( + request = compute_v1.AddRuleRegionSecurityPolicyRequest( project="project_value", region="region_value", security_policy="security_policy_value", ) # Make the request - response = client.delete(request=request) + response = client.add_rule(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.AddRuleRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Delete. See the + RegionSecurityPolicies.AddRule. See the method description for details. project (str): Project ID for this request. @@ -616,11 +627,16 @@ def sample_delete(): should not be set. security_policy (str): Name of the security policy to - delete. + update. 
This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -636,7 +652,9 @@ def sample_delete(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region, security_policy]) + has_flattened_params = any( + [project, region, security_policy, security_policy_rule_resource] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -644,11 +662,11 @@ def sample_delete(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSecurityPolicyRequest. + # in a compute.AddRuleRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): - request = compute.DeleteRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.AddRuleRegionSecurityPolicyRequest): + request = compute.AddRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -657,10 +675,12 @@ def sample_delete(): request.region = region if security_policy is not None: request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete] + rpc = self._transport._wrapped_methods[self._transport.add_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -710,9 +730,11 @@ def error_code(self): # Done; return the response. return response - def get( + def delete_unary( self, - request: Optional[Union[compute.GetRegionSecurityPolicyRequest, dict]] = None, + request: Optional[ + Union[compute.DeleteRegionSecurityPolicyRequest, dict] + ] = None, *, project: Optional[str] = None, region: Optional[str] = None, @@ -720,9 +742,8 @@ def get( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> compute.SecurityPolicy: - r"""List all of the ordered rules present in a single - specified policy. + ) -> compute.Operation: + r"""Deletes the specified policy. .. 
code-block:: python @@ -735,27 +756,27 @@ def get( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_get(): + def sample_delete(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.GetRegionSecurityPolicyRequest( + request = compute_v1.DeleteRegionSecurityPolicyRequest( project="project_value", region="region_value", security_policy="security_policy_value", ) # Make the request - response = client.get(request=request) + response = client.delete(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Get. See the + RegionSecurityPolicies.Delete. See the method description for details. project (str): Project ID for this request. @@ -770,7 +791,9 @@ def sample_get(): on the ``request`` instance; if ``request`` is provided, this should not be set. security_policy (str): - Name of the security policy to get. + Name of the security policy to + delete. + This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -781,13 +804,9 @@ def sample_get(): sent along with the request as metadata. Returns: - google.cloud.compute_v1.types.SecurityPolicy: - Represents a Google Cloud Armor - security policy resource. Only external - backend services that use load balancers - can reference a security policy. For - more information, see Google Cloud Armor - security policy overview. + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. """ # Create or coerce a protobuf request object. @@ -801,11 +820,11 @@ def sample_get(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionSecurityPolicyRequest. + # in a compute.DeleteRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.GetRegionSecurityPolicyRequest): - request = compute.GetRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): + request = compute.DeleteRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -817,7 +836,7 @@ def sample_get(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get] + rpc = self._transport._wrapped_methods[self._transport.delete] # Certain fields should be provided within the metadata header; # add these here. @@ -842,21 +861,20 @@ def sample_get(): # Done; return the response. 
return response - def insert_unary( + def delete( self, request: Optional[ - Union[compute.InsertRegionSecurityPolicyRequest, dict] + Union[compute.DeleteRegionSecurityPolicyRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, - security_policy_resource: Optional[compute.SecurityPolicy] = None, + security_policy: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> compute.Operation: - r"""Creates a new policy in the specified project using - the data included in the request. + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified policy. .. code-block:: python @@ -869,26 +887,27 @@ def insert_unary( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_insert(): + def sample_delete(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.InsertRegionSecurityPolicyRequest( + request = compute_v1.DeleteRegionSecurityPolicyRequest( project="project_value", region="region_value", + security_policy="security_policy_value", ) # Make the request - response = client.insert(request=request) + response = client.delete(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Insert. See the + RegionSecurityPolicies.Delete. See the method description for details. project (str): Project ID for this request. @@ -902,9 +921,11 @@ def sample_insert(): This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): - The body resource for this request - This corresponds to the ``security_policy_resource`` field + security_policy (str): + Name of the security policy to + delete. + + This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -922,7 +943,7 @@ def sample_insert(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region, security_policy_resource]) + has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -930,23 +951,23 @@ def sample_insert(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSecurityPolicyRequest. + # in a compute.DeleteRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): - request = compute.InsertRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): + request = compute.DeleteRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if project is not None: request.project = project if region is not None: request.region = region - if security_policy_resource is not None: - request.security_policy_resource = security_policy_resource + if security_policy is not None: + request.security_policy = security_policy # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.insert] + rpc = self._transport._wrapped_methods[self._transport.delete] # Certain fields should be provided within the metadata header; # add these here. @@ -955,6 +976,7 @@ def sample_insert(): ( ("project", request.project), ("region", request.region), + ("security_policy", request.security_policy), ) ), ) @@ -967,24 +989,47 @@ def sample_insert(): metadata=metadata, ) + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + # Done; return the response. return response - def insert( + def get( self, - request: Optional[ - Union[compute.InsertRegionSecurityPolicyRequest, dict] - ] = None, + request: Optional[Union[compute.GetRegionSecurityPolicyRequest, dict]] = None, *, project: Optional[str] = None, region: Optional[str] = None, - security_policy_resource: Optional[compute.SecurityPolicy] = None, + security_policy: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> extended_operation.ExtendedOperation: - r"""Creates a new policy in the specified project using - the data included in the request. + ) -> compute.SecurityPolicy: + r"""List all of the ordered rules present in a single + specified policy. .. code-block:: python @@ -997,26 +1042,27 @@ def insert( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_insert(): + def sample_get(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.InsertRegionSecurityPolicyRequest( + request = compute_v1.GetRegionSecurityPolicyRequest( project="project_value", region="region_value", + security_policy="security_policy_value", ) # Make the request - response = client.insert(request=request) + response = client.get(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest, dict]): The request object. 
A request message for - RegionSecurityPolicies.Insert. See the + RegionSecurityPolicies.Get. See the method description for details. project (str): Project ID for this request. @@ -1030,9 +1076,9 @@ def sample_insert(): This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): - The body resource for this request - This corresponds to the ``security_policy_resource`` field + security_policy (str): + Name of the security policy to get. + This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1042,15 +1088,19 @@ def sample_insert(): sent along with the request as metadata. Returns: - google.api_core.extended_operation.ExtendedOperation: - An object representing a extended - long-running operation. + google.cloud.compute_v1.types.SecurityPolicy: + Represents a Google Cloud Armor + security policy resource. Only external + backend services that use load balancers + can reference a security policy. For + more information, see Google Cloud Armor + security policy overview. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region, security_policy_resource]) + has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1058,23 +1108,23 @@ def sample_insert(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSecurityPolicyRequest. + # in a compute.GetRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): - request = compute.InsertRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.GetRegionSecurityPolicyRequest): + request = compute.GetRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: request.project = project if region is not None: request.region = region - if security_policy_resource is not None: - request.security_policy_resource = security_policy_resource + if security_policy is not None: + request.security_policy = security_policy # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.insert] + rpc = self._transport._wrapped_methods[self._transport.get] # Certain fields should be provided within the metadata header; # add these here. 
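For orientation, a minimal usage sketch of the flattened-argument surface shown above (a sketch only: the project, region, and policy names are placeholders, and the ``result()`` call relies on the generic ``google.api_core.extended_operation.ExtendedOperation`` interface rather than anything introduced in this patch):

.. code-block:: python

    # Sketch only: assumes Application Default Credentials and an existing
    # regional security policy; all identifiers below are placeholders.
    from google.cloud import compute_v1

    client = compute_v1.RegionSecurityPoliciesClient()

    # Read a policy with the flattened arguments documented above.
    policy = client.get(
        project="my-project",
        region="us-central1",
        security_policy="example-policy",
    )
    print(policy.name)

    # delete() returns an ExtendedOperation that polls the regional
    # operations service; result() blocks until the operation finishes.
    operation = client.delete(
        project="my-project",
        region="us-central1",
        security_policy="example-policy",
    )
    operation.result(timeout=300)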
@@ -1083,6 +1133,7 @@ def sample_insert(): ( ("project", request.project), ("region", request.region), + ("security_policy", request.security_policy), ) ), ) @@ -1095,48 +1146,23 @@ def sample_insert(): metadata=metadata, ) - operation_service = self._transport._region_operations_client - operation_request = compute.GetRegionOperationRequest() - operation_request.project = request.project - operation_request.region = request.region - operation_request.operation = response.name - - get_operation = functools.partial(operation_service.get, operation_request) - # Cancel is not part of extended operations yet. - cancel_operation = lambda: None - - # Note: this class is an implementation detail to provide a uniform - # set of names for certain fields in the extended operation proto message. - # See google.api_core.extended_operation.ExtendedOperation for details - # on these properties and the expected interface. - class _CustomOperation(extended_operation.ExtendedOperation): - @property - def error_message(self): - return self._extended_operation.http_error_message - - @property - def error_code(self): - return self._extended_operation.http_error_status_code - - response = _CustomOperation.make(get_operation, cancel_operation, response) - # Done; return the response. return response - def list( + def get_rule( self, request: Optional[ - Union[compute.ListRegionSecurityPoliciesRequest, dict] + Union[compute.GetRuleRegionSecurityPolicyRequest, dict] ] = None, *, project: Optional[str] = None, region: Optional[str] = None, + security_policy: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPager: - r"""List all the policies that have been configured for - the specified project and region. + ) -> compute.SecurityPolicyRule: + r"""Gets a rule at the specified priority. .. code-block:: python @@ -1149,27 +1175,27 @@ def list( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_list(): + def sample_get_rule(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.ListRegionSecurityPoliciesRequest( + request = compute_v1.GetRuleRegionSecurityPolicyRequest( project="project_value", region="region_value", + security_policy="security_policy_value", ) # Make the request - page_result = client.list(request=request) + response = client.get_rule(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest, dict]): + request (Union[google.cloud.compute_v1.types.GetRuleRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.List. See the + RegionSecurityPolicies.GetRule. See the method description for details. project (str): Project ID for this request. @@ -1183,6 +1209,13 @@ def sample_list(): This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + security_policy (str): + Name of the security policy to which + the queried rule belongs. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -1190,8 +1223,415 @@ def sample_list(): sent along with the request as metadata. Returns: - google.cloud.compute_v1.services.region_security_policies.pagers.ListPager: - Iterating over this object will yield + google.cloud.compute_v1.types.SecurityPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRuleRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRuleRegionSecurityPolicyRequest): + request = compute.GetRuleRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary( + self, + request: Optional[ + Union[compute.InsertRegionSecurityPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): + request = compute.InsertRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert( + self, + request: Optional[ + Union[compute.InsertRegionSecurityPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): + request = compute.InsertRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list( + self, + request: Optional[ + Union[compute.ListRegionSecurityPoliciesRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""List all the policies that have been configured for + the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSecurityPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_security_policies.pagers.ListPager: + Iterating over this object will yield results and resolve additional pages automatically. @@ -1199,7 +1639,467 @@ def sample_list(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, region]) + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionSecurityPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionSecurityPoliciesRequest): + request = compute.ListRegionSecurityPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary( + self, + request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified policy with the data included + in the request. To clear fields in the policy, leave the + fields empty and specify them in the updateMask. This + cannot be used to be update the rules in the policy. + Please use the per rule methods like addRule, patchRule, + and removeRule instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, security_policy, security_policy_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): + request = compute.PatchRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch( + self, + request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified policy with the data included + in the request. To clear fields in the policy, leave the + fields empty and specify them in the updateMask. This + cannot be used to be update the rules in the policy. + Please use the per rule methods like addRule, patchRule, + and removeRule instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, security_policy, security_policy_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): + request = compute.PatchRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_rule_unary( + self, + request: Optional[ + Union[compute.PatchRuleRegionSecurityPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches a rule at the specified priority. To clear + fields in the rule, leave the fields empty and specify + them in the updateMask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.PatchRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, security_policy, security_policy_rule_resource] + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1207,21 +2107,25 @@ def sample_list(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionSecurityPoliciesRequest. + # in a compute.PatchRuleRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.ListRegionSecurityPoliciesRequest): - request = compute.ListRegionSecurityPoliciesRequest(request) + if not isinstance(request, compute.PatchRuleRegionSecurityPolicyRequest): + request = compute.PatchRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if project is not None: request.project = project if region is not None: request.region = region + if security_policy is not None: + request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list] + rpc = self._transport._wrapped_methods[self._transport.patch_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -1230,6 +2134,7 @@ def sample_list(): ( ("project", request.project), ("region", request.region), + ("security_policy", request.security_policy), ) ), ) @@ -1242,36 +2147,26 @@ def sample_list(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def patch_unary( + def patch_rule( self, - request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + request: Optional[ + Union[compute.PatchRuleRegionSecurityPolicyRequest, dict] + ] = None, *, project: Optional[str] = None, region: Optional[str] = None, security_policy: Optional[str] = None, - security_policy_resource: Optional[compute.SecurityPolicy] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> compute.Operation: - r"""Patches the specified policy with the data included - in the request. To clear fields in the policy, leave the - fields empty and specify them in the updateMask. This - cannot be used to be update the rules in the policy. - Please use the per rule methods like addRule, patchRule, - and removeRule instead. + ) -> extended_operation.ExtendedOperation: + r"""Patches a rule at the specified priority. To clear + fields in the rule, leave the fields empty and specify + them in the updateMask. .. code-block:: python @@ -1284,28 +2179,28 @@ def patch_unary( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_patch(): + def sample_patch_rule(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.PatchRegionSecurityPolicyRequest( + request = compute_v1.PatchRuleRegionSecurityPolicyRequest( project="project_value", region="region_value", security_policy="security_policy_value", ) # Make the request - response = client.patch(request=request) + response = client.patch_rule(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.PatchRuleRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Patch. See the - method description for details. + RegionSecurityPolicies.PatchRule. See + the method description for details. project (str): Project ID for this request. This corresponds to the ``project`` field @@ -1325,9 +2220,9 @@ def sample_patch(): This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): The body resource for this request - This corresponds to the ``security_policy_resource`` field + This corresponds to the ``security_policy_rule_resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1346,7 +2241,7 @@ def sample_patch(): # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any( - [project, region, security_policy, security_policy_resource] + [project, region, security_policy, security_policy_rule_resource] ) if request is not None and has_flattened_params: raise ValueError( @@ -1355,11 +2250,11 @@ def sample_patch(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSecurityPolicyRequest. + # in a compute.PatchRuleRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): - request = compute.PatchRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.PatchRuleRegionSecurityPolicyRequest): + request = compute.PatchRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -1368,12 +2263,12 @@ def sample_patch(): request.region = region if security_policy is not None: request.security_policy = security_policy - if security_policy_resource is not None: - request.security_policy_resource = security_policy_resource + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.patch] + rpc = self._transport._wrapped_methods[self._transport.patch_rule] # Certain fields should be provided within the metadata header; # add these here. @@ -1395,27 +2290,48 @@ def sample_patch(): metadata=metadata, ) + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + # Done; return the response. 
return response - def patch( + def remove_rule_unary( self, - request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + request: Optional[ + Union[compute.RemoveRuleRegionSecurityPolicyRequest, dict] + ] = None, *, project: Optional[str] = None, region: Optional[str] = None, security_policy: Optional[str] = None, - security_policy_resource: Optional[compute.SecurityPolicy] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> extended_operation.ExtendedOperation: - r"""Patches the specified policy with the data included - in the request. To clear fields in the policy, leave the - fields empty and specify them in the updateMask. This - cannot be used to be update the rules in the policy. - Please use the per rule methods like addRule, patchRule, - and removeRule instead. + ) -> compute.Operation: + r"""Deletes a rule at the specified priority. .. code-block:: python @@ -1428,28 +2344,28 @@ def patch( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import compute_v1 - def sample_patch(): + def sample_remove_rule(): # Create a client client = compute_v1.RegionSecurityPoliciesClient() # Initialize request argument(s) - request = compute_v1.PatchRegionSecurityPolicyRequest( + request = compute_v1.RemoveRuleRegionSecurityPolicyRequest( project="project_value", region="region_value", security_policy="security_policy_value", ) # Make the request - response = client.patch(request=request) + response = client.remove_rule(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + request (Union[google.cloud.compute_v1.types.RemoveRuleRegionSecurityPolicyRequest, dict]): The request object. A request message for - RegionSecurityPolicies.Patch. See the - method description for details. + RegionSecurityPolicies.RemoveRule. See + the method description for details. project (str): Project ID for this request. This corresponds to the ``project`` field @@ -1469,9 +2385,135 @@ def sample_patch(): This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): - The body resource for this request - This corresponds to the ``security_policy_resource`` field + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.RemoveRuleRegionSecurityPolicyRequest): + request = compute.RemoveRuleRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_rule( + self, + request: Optional[ + Union[compute.RemoveRuleRegionSecurityPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a rule at the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.RemoveRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1489,9 +2531,7 @@ def sample_patch(): # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any( - [project, region, security_policy, security_policy_resource] - ) + has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1499,11 +2539,11 @@ def sample_patch(): ) # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSecurityPolicyRequest. + # in a compute.RemoveRuleRegionSecurityPolicyRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): - request = compute.PatchRegionSecurityPolicyRequest(request) + if not isinstance(request, compute.RemoveRuleRegionSecurityPolicyRequest): + request = compute.RemoveRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if project is not None: @@ -1512,12 +2552,10 @@ def sample_patch(): request.region = region if security_policy is not None: request.security_policy = security_policy - if security_policy_resource is not None: - request.security_policy_resource = security_policy_resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.patch] + rpc = self._transport._wrapped_methods[self._transport.remove_rule] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/base.py index b27e235eed14..631e879a133e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/base.py @@ -127,6 +127,11 @@ def __init__( def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { + self.add_rule: gapic_v1.method.wrap_method( + self.add_rule, + default_timeout=None, + client_info=client_info, + ), self.delete: gapic_v1.method.wrap_method( self.delete, default_timeout=None, @@ -137,6 +142,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_rule: gapic_v1.method.wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=None, @@ -152,6 +162,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.patch_rule: gapic_v1.method.wrap_method( + self.patch_rule, + default_timeout=None, + client_info=client_info, + ), + self.remove_rule: gapic_v1.method.wrap_method( + self.remove_rule, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -163,6 +183,15 @@ def close(self): """ raise NotImplementedError() + @property + def add_rule( + self, + ) -> Callable[ + [compute.AddRuleRegionSecurityPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def delete( self, @@ -181,6 +210,15 @@ def get( ]: raise NotImplementedError() + @property + def get_rule( + self, + ) -> Callable[ + [compute.GetRuleRegionSecurityPolicyRequest], + Union[compute.SecurityPolicyRule, Awaitable[compute.SecurityPolicyRule]], + ]: + raise NotImplementedError() + @property def insert( self, @@ -208,6 +246,24 @@ def patch( ]: raise NotImplementedError() + @property + def patch_rule( + self, + ) -> Callable[ + [compute.PatchRuleRegionSecurityPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def remove_rule( + self, + ) -> Callable[ + [compute.RemoveRuleRegionSecurityPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py index 6612c70dfed2..65e67d36a1a9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py @@ -63,6 +63,14 @@ class RegionSecurityPoliciesRestInterceptor: .. 
code-block:: python class MyCustomRegionSecurityPoliciesInterceptor(RegionSecurityPoliciesRestInterceptor): + def pre_add_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -79,6 +87,14 @@ def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -103,12 +119,49 @@ def post_patch(self, response): logging.log(f"Received response: {response}") return response + def pre_patch_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_rule(self, response): + logging.log(f"Received response: {response}") + return response + transport = RegionSecurityPoliciesRestTransport(interceptor=MyCustomRegionSecurityPoliciesInterceptor()) client = RegionSecurityPoliciesClient(transport=transport) """ + def pre_add_rule( + self, + request: compute.AddRuleRegionSecurityPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.AddRuleRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_add_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_delete( self, request: compute.DeleteRegionSecurityPolicyRequest, @@ -151,6 +204,29 @@ def post_get(self, response: compute.SecurityPolicy) -> compute.SecurityPolicy: """ return response + def pre_get_rule( + self, + request: compute.GetRuleRegionSecurityPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetRuleRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_get_rule( + self, response: compute.SecurityPolicyRule + ) -> compute.SecurityPolicyRule: + """Post-rpc interceptor for get_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_insert( self, request: compute.InsertRegionSecurityPolicyRequest, @@ -216,6 +292,50 @@ def post_patch(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_patch_rule( + self, + request: compute.PatchRuleRegionSecurityPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.PatchRuleRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_patch_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + + def pre_remove_rule( + self, + request: compute.RemoveRuleRegionSecurityPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.RemoveRuleRegionSecurityPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for remove_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_remove_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class RegionSecurityPoliciesRestStub: @@ -320,6 +440,116 @@ def __init__( self._interceptor = interceptor or RegionSecurityPoliciesRestInterceptor() self._prep_wrapped_messages(client_info) + class _AddRule(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("AddRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.AddRuleRegionSecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the add rule method over HTTP. + + Args: + request (~.compute.AddRuleRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.AddRule. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/addRule", + "body": "security_policy_rule_resource", + }, + ] + request, metadata = self._interceptor.pre_add_rule(request, metadata) + pb_request = compute.AddRuleRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_rule(resp) + return resp + class _Delete(RegionSecurityPoliciesRestStub): def __hash__(self): return hash("Delete") @@ -370,7 +600,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -513,6 +743,96 @@ def __call__( resp = self._interceptor.post_get(resp) return resp + class _GetRule(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("GetRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetRuleRegionSecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SecurityPolicyRule: + r"""Call the get rule method over HTTP. + + Args: + request (~.compute.GetRuleRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.GetRule. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SecurityPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/getRule", + }, + ] + request, metadata = self._interceptor.pre_get_rule(request, metadata) + pb_request = compute.GetRuleRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicyRule() + pb_resp = compute.SecurityPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + class _Insert(RegionSecurityPoliciesRestStub): def __hash__(self): return hash("Insert") @@ -563,7 +883,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -759,7 +1079,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -819,6 +1139,225 @@ def __call__( resp = self._interceptor.post_patch(resp) return resp + class _PatchRule(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("PatchRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.PatchRuleRegionSecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the patch rule method over HTTP. + + Args: + request (~.compute.PatchRuleRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.PatchRule. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/patchRule", + "body": "security_policy_rule_resource", + }, + ] + request, metadata = self._interceptor.pre_patch_rule(request, metadata) + pb_request = compute.PatchRuleRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_rule(resp) + return resp + + class _RemoveRule(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("RemoveRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.RemoveRuleRegionSecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the remove rule method over HTTP. + + Args: + request (~.compute.RemoveRuleRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.RemoveRule. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/removeRule", + }, + ] + request, metadata = self._interceptor.pre_remove_rule(request, metadata) + pb_request = compute.RemoveRuleRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_rule(resp) + return resp + + @property + def add_rule( + self, + ) -> Callable[[compute.AddRuleRegionSecurityPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddRule(self._session, self._host, self._interceptor) # type: ignore + @property def delete( self, @@ -835,6 +1374,16 @@ def get( # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_rule( + self, + ) -> Callable[ + [compute.GetRuleRegionSecurityPolicyRequest], compute.SecurityPolicyRule + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRule(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, @@ -861,6 +1410,22 @@ def patch( # In C++ this would require a dynamic_cast return self._Patch(self._session, self._host, self._interceptor) # type: ignore + @property + def patch_rule( + self, + ) -> Callable[[compute.PatchRuleRegionSecurityPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PatchRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_rule( + self, + ) -> Callable[[compute.RemoveRuleRegionSecurityPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveRule(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index efe167a61df1..68f47bc91439 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -787,22 +787,26 @@ def sample_get(): Returns: google.cloud.compute_v1.types.SslCertificate: - Represents an SSL Certificate resource. Google Compute - Engine has two SSL Certificate resources: \* + Represents an SSL certificate resource. Google Compute + Engine has two SSL certificate resources: \* [Global](/compute/docs/reference/rest/v1/sslCertificates) \* [Regional](/compute/docs/reference/rest/v1/regionSslCertificates) - The sslCertificates are used by: - external HTTPS load - balancers - SSL proxy load balancers The - regionSslCertificates are used by internal HTTPS load - balancers. Optionally, certificate file contents that - you upload can contain a set of up to five PEM-encoded - certificates. The API call creates an object - (sslCertificate) that holds this data. You can use SSL - keys and certificates to secure connections to a load - balancer. For more information, read Creating and using - SSL certificates, SSL certificates quotas and limits, - and Troubleshooting SSL certificates. + The global SSL certificates (sslCertificates) are used + by: - Global external Application Load Balancers - + Classic Application Load Balancers - Proxy Network Load + Balancers (with target SSL proxies) The regional SSL + certificates (regionSslCertificates) are used by: - + Regional external Application Load Balancers - Regional + internal Application Load Balancers Optionally, + certificate file contents that you upload can contain a + set of up to five PEM-encoded certificates. The API call + creates an object (sslCertificate) that holds this data. + You can use SSL keys and certificates to secure + connections to a load balancer. For more information, + read Creating and using SSL certificates, SSL + certificates quotas and limits, and Troubleshooting SSL + certificates. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py index c9f947cc0181..273c6430bc91 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py @@ -341,7 +341,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -429,22 +429,26 @@ def __call__( Returns: ~.compute.SslCertificate: - Represents an SSL Certificate resource. Google Compute - Engine has two SSL Certificate resources: \* + Represents an SSL certificate resource. Google Compute + Engine has two SSL certificate resources: \* `Global `__ \* `Regional `__ - The sslCertificates are used by: - external HTTPS load - balancers - SSL proxy load balancers The - regionSslCertificates are used by internal HTTPS load - balancers. Optionally, certificate file contents that - you upload can contain a set of up to five PEM-encoded - certificates. The API call creates an object - (sslCertificate) that holds this data. You can use SSL - keys and certificates to secure connections to a load - balancer. For more information, read Creating and using - SSL certificates, SSL certificates quotas and limits, - and Troubleshooting SSL certificates. + The global SSL certificates (sslCertificates) are used + by: - Global external Application Load Balancers - + Classic Application Load Balancers - Proxy Network Load + Balancers (with target SSL proxies) The regional SSL + certificates (regionSslCertificates) are used by: - + Regional external Application Load Balancers - Regional + internal Application Load Balancers Optionally, + certificate file contents that you upload can contain a + set of up to five PEM-encoded certificates. The API call + creates an object (sslCertificate) that holds this data. + You can use SSL keys and certificates to secure + connections to a load balancer. For more information, + read Creating and using SSL certificates, SSL + certificates quotas and limits, and Troubleshooting SSL + certificates. """ @@ -544,7 +548,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py index ae4204099924..42a91c1d9faf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py @@ -401,7 +401,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -594,7 +594,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -880,7 +880,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index d97fb374171d..374a37800e72 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -789,14 +789,17 @@ def sample_get(): [Global](/compute/docs/reference/rest/v1/targetHttpProxies) \* [Regional](/compute/docs/reference/rest/v1/regionTargetHttpProxies) - A target HTTP proxy is a component of GCP HTTP load - balancers. \* targetHttpProxies are used by external - HTTP load balancers and Traffic Director. \* - regionTargetHttpProxies are used by internal HTTP load - balancers. Forwarding rules reference a target HTTP - proxy, and the target proxy then references a URL map. - For more information, read Using Target Proxies and - Forwarding rule concepts. + A target HTTP proxy is a component of Google Cloud HTTP + load balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTP proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py index ded58c19b1c9..07b0e6bc811b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py @@ -372,7 +372,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -465,14 +465,17 @@ def __call__( `Global `__ \* `Regional `__ - A target HTTP proxy is a component of GCP HTTP load - balancers. \* targetHttpProxies are used by external - HTTP load balancers and Traffic Director. \* - regionTargetHttpProxies are used by internal HTTP load - balancers. Forwarding rules reference a target HTTP - proxy, and the target proxy then references a URL map. - For more information, read Using Target Proxies and - Forwarding rule concepts. + A target HTTP proxy is a component of Google Cloud HTTP + load balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. 
\* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTP proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ @@ -572,7 +575,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -768,7 +771,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index 6c60cd68a725..4290a3815a88 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -790,12 +790,16 @@ def sample_get(): \* [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) A target HTTPS proxy is a component of GCP HTTPS load - balancers. \* targetHttpsProxies are used by external - HTTPS load balancers. \* regionTargetHttpsProxies are - used by internal HTTPS load balancers. Forwarding rules - reference a target HTTPS proxy, and the target proxy - then references a URL map. For more information, read - Using Target Proxies and Forwarding rule concepts. + balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTPS proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py index 5190a7a2a4d7..c4bf0bc2a375 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py @@ -435,7 +435,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -529,12 +529,16 @@ def __call__( \* `Regional `__ A target HTTPS proxy is a component of GCP HTTPS load - balancers. 
\* targetHttpsProxies are used by external - HTTPS load balancers. \* regionTargetHttpsProxies are - used by internal HTTPS load balancers. Forwarding rules - reference a target HTTPS proxy, and the target proxy - then references a URL map. For more information, read - Using Target Proxies and Forwarding rule concepts. + balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTPS proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ @@ -634,7 +638,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -832,7 +836,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -942,7 +946,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1056,7 +1060,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py index 8ced092be52e..722c4f9ab242 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py @@ -341,7 +341,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -536,7 +536,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index 27fa440f0a0d..9b94b4db12e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -783,19 +783,23 @@ def sample_get(): [Regional](/compute/docs/reference/rest/v1/regionUrlMaps) A URL map resource is a component of certain types of cloud load balancers and Traffic Director: \* urlMaps - are used by external HTTP(S) load balancers and Traffic - Director. \* regionUrlMaps are used by internal HTTP(S) - load balancers. For a list of supported URL map features - by the load balancer type, see the Load balancing - features: Routing and traffic management table. For a - list of supported URL map features for Traffic Director, - see the Traffic Director features: Routing and traffic - management table. This resource defines mappings from - hostnames and URL paths to either a backend service or a - backend bucket. To use the global urlMaps resource, the - backend service must have a loadBalancingScheme of - either EXTERNAL or INTERNAL_SELF_MANAGED. To use the - regionUrlMaps resource, the backend service must have a + are used by global external Application Load Balancers, + classic Application Load Balancers, and cross-region + internal Application Load Balancers. \* regionUrlMaps + are used by internal Application Load Balancers, + regional external Application Load Balancers and + regional internal Application Load Balancers. For a list + of supported URL map features by the load balancer type, + see the Load balancing features: Routing and traffic + management table. For a list of supported URL map + features for Traffic Director, see the Traffic Director + features: Routing and traffic management table. This + resource defines mappings from hostnames and URL paths + to either a backend service or a backend bucket. To use + the global urlMaps resource, the backend service must + have a loadBalancingScheme of either EXTERNAL or + INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py index ab4d0c029b3b..57294580f58e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py @@ -428,7 +428,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -522,19 +522,23 @@ def __call__( `Regional `__ A URL map resource is a component of certain types of cloud load balancers and Traffic Director: \* urlMaps - are used by external HTTP(S) load balancers and Traffic - Director. \* regionUrlMaps are used by internal HTTP(S) - load balancers. 
For a list of supported URL map features - by the load balancer type, see the Load balancing - features: Routing and traffic management table. For a - list of supported URL map features for Traffic Director, - see the Traffic Director features: Routing and traffic - management table. This resource defines mappings from - hostnames and URL paths to either a backend service or a - backend bucket. To use the global urlMaps resource, the - backend service must have a loadBalancingScheme of - either EXTERNAL or INTERNAL_SELF_MANAGED. To use the - regionUrlMaps resource, the backend service must have a + are used by global external Application Load Balancers, + classic Application Load Balancers, and cross-region + internal Application Load Balancers. \* regionUrlMaps + are used by internal Application Load Balancers, + regional external Application Load Balancers and + regional internal Application Load Balancers. For a list + of supported URL map features by the load balancer type, + see the Load balancing features: Routing and traffic + management table. For a list of supported URL map + features for Traffic Director, see the Traffic Director + features: Routing and traffic management table. This + resource defines mappings from hostnames and URL paths + to either a backend service or a backend bucket. To use + the global urlMaps resource, the backend service must + have a loadBalancingScheme of either EXTERNAL or + INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. @@ -636,7 +640,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -832,7 +836,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -942,7 +946,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 1f13e7791219..89d2ac0a10f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -1032,29 +1032,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1913,29 +1895,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py index 4dee146b04fb..00449a53160a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py @@ -603,7 +603,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -800,29 +800,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -923,7 +905,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1119,7 +1101,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1233,29 +1215,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1462,7 +1426,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index 603b609f1d2b..7f6a5ed1b4f1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -1042,29 +1042,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1914,29 +1896,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py index 90b2870fc0bc..0aaa07468663 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py @@ -580,7 +580,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -777,29 +777,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -900,7 +882,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1096,7 +1078,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1210,29 +1192,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index 18b9413a71ad..387209db7825 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -939,6 +939,132 @@ def sample_get(): # Done; return the response. return response + def get_nat_ip_info( + self, + request: Optional[Union[compute.GetNatIpInfoRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NatIpInfoResponse: + r"""Retrieves runtime NAT IP information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_nat_ip_info(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetNatIpInfoRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get_nat_ip_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNatIpInfoRouterRequest, dict]): + The request object. A request message for + Routers.GetNatIpInfo. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to query + for Nat IP information. The name should + conform to RFC1035. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NatIpInfoResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNatIpInfoRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNatIpInfoRouterRequest): + request = compute.GetNatIpInfoRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_nat_ip_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("router", request.router), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def get_nat_mapping_info( self, request: Optional[Union[compute.GetNatMappingInfoRoutersRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/base.py index 31bdff8a8108..b7ae7ece8358 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/base.py @@ -142,6 +142,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_nat_ip_info: gapic_v1.method.wrap_method( + self.get_nat_ip_info, + default_timeout=None, + client_info=client_info, + ), self.get_nat_mapping_info: gapic_v1.method.wrap_method( self.get_nat_mapping_info, default_timeout=None, @@ -214,6 +219,15 @@ def get( ]: raise NotImplementedError() + @property + def get_nat_ip_info( + self, + ) -> Callable[ + [compute.GetNatIpInfoRouterRequest], + Union[compute.NatIpInfoResponse, Awaitable[compute.NatIpInfoResponse]], + ]: + raise NotImplementedError() + @property def get_nat_mapping_info( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py index 50a2b6a67656..bb8c4722722b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py @@ -87,6 +87,14 @@ def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_nat_ip_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_nat_ip_info(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_nat_mapping_info(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -210,6 +218,29 @@ def post_get(self, response: compute.Router) -> compute.Router: """ return response + def pre_get_nat_ip_info( + self, + request: compute.GetNatIpInfoRouterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetNatIpInfoRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_nat_ip_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_get_nat_ip_info( + self, response: compute.NatIpInfoResponse + ) -> compute.NatIpInfoResponse: + """Post-rpc interceptor for get_nat_ip_info + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_get_nat_mapping_info( self, request: compute.GetNatMappingInfoRoutersRequest, @@ -592,7 +623,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
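The hunks above add get_nat_ip_info to RoutersClient, register it in the transport base, and give it pre/post interceptor hooks. Beyond the generated request-object sample, the method also accepts the flattened project/region/router keyword arguments shown in its signature; the sketch below is illustrative only and the resource names are placeholders.

    from google.cloud import compute_v1

    # Placeholder project/region/router values; substitute real resource names.
    client = compute_v1.RoutersClient()
    nat_ip_info = client.get_nat_ip_info(
        project="my-project",
        region="us-central1",
        router="my-router",
    )

    # Print the NatIpInfoResponse as-is rather than assuming its field layout.
    print(nat_ip_info)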
""" @@ -731,6 +762,92 @@ def __call__( resp = self._interceptor.post_get(resp) return resp + class _GetNatIpInfo(RoutersRestStub): + def __hash__(self): + return hash("GetNatIpInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetNatIpInfoRouterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NatIpInfoResponse: + r"""Call the get nat ip info method over HTTP. + + Args: + request (~.compute.GetNatIpInfoRouterRequest): + The request object. A request message for + Routers.GetNatIpInfo. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NatIpInfoResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatIpInfo", + }, + ] + request, metadata = self._interceptor.pre_get_nat_ip_info(request, metadata) + pb_request = compute.GetNatIpInfoRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NatIpInfoResponse() + pb_resp = compute.NatIpInfoResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_nat_ip_info(resp) + return resp + class _GetNatMappingInfo(RoutersRestStub): def __hash__(self): return hash("GetNatMappingInfo") @@ -958,7 +1075,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1152,7 +1269,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -1356,7 +1473,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1436,6 +1553,14 @@ def get(self) -> Callable[[compute.GetRouterRequest], compute.Router]: # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_nat_ip_info( + self, + ) -> Callable[[compute.GetNatIpInfoRouterRequest], compute.NatIpInfoResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetNatIpInfo(self._session, self._host, self._interceptor) # type: ignore + @property def get_nat_mapping_info( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py index d15190cf56ca..0e8d9ca9aad5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py @@ -330,7 +330,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -521,7 +521,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py index dec10513f610..ff6004715941 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py @@ -584,7 +584,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -780,7 +780,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1063,7 +1063,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. 
For more information, read Global, Regional, and Zonal Resources. """ @@ -1357,7 +1357,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1467,7 +1467,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1577,7 +1577,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1678,7 +1678,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index dc23651166d3..07bd308942c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -1050,29 +1050,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1943,29 +1925,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py index 0089da59acf4..b2b94e24262c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py @@ -584,7 +584,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -783,29 +783,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -906,7 +888,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1102,7 +1084,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1216,29 +1198,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/__init__.py new file mode 100644 index 000000000000..8cecc57f1c05 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SnapshotSettingsServiceClient + +__all__ = ("SnapshotSettingsServiceClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py new file mode 100644 index 000000000000..1cb64cf3cbbe --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -0,0 +1,785 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore + +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, SnapshotSettingsServiceTransport +from .transports.rest import SnapshotSettingsServiceRestTransport + + +class SnapshotSettingsServiceClientMeta(type): + """Metaclass for the SnapshotSettingsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SnapshotSettingsServiceTransport]] + _transport_registry["rest"] = SnapshotSettingsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SnapshotSettingsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+ return next(iter(cls._transport_registry.values())) + + +class SnapshotSettingsServiceClient(metaclass=SnapshotSettingsServiceClientMeta): + """The SnapshotSettings API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SnapshotSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SnapshotSettingsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SnapshotSettingsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the snapshot settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SnapshotSettingsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SnapshotSettingsServiceTransport): + # transport is a SnapshotSettingsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get( + self, + request: Optional[Union[compute.GetSnapshotSettingRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SnapshotSettings: + r"""Get snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.SnapshotSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.GetSnapshotSettingRequest( + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSnapshotSettingRequest, dict]): + The request object. A request message for + SnapshotSettingsService.Get. 
See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SnapshotSettings: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSnapshotSettingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetSnapshotSettingRequest): + request = compute.GetSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary( + self, + request: Optional[Union[compute.PatchSnapshotSettingRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot_settings_resource: Optional[compute.SnapshotSettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patch snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SnapshotSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.PatchSnapshotSettingRequest( + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSnapshotSettingRequest, dict]): + The request object. A request message for + SnapshotSettingsService.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): + The body resource for this request + This corresponds to the ``snapshot_settings_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, snapshot_settings_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSnapshotSettingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSnapshotSettingRequest): + request = compute.PatchSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if snapshot_settings_resource is not None: + request.snapshot_settings_resource = snapshot_settings_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch( + self, + request: Optional[Union[compute.PatchSnapshotSettingRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot_settings_resource: Optional[compute.SnapshotSettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patch snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SnapshotSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.PatchSnapshotSettingRequest( + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSnapshotSettingRequest, dict]): + The request object. 
A request message for + SnapshotSettingsService.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): + The body resource for this request + This corresponds to the ``snapshot_settings_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, snapshot_settings_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSnapshotSettingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSnapshotSettingRequest): + request = compute.PatchSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if snapshot_settings_resource is not None: + request.snapshot_settings_resource = snapshot_settings_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "SnapshotSettingsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SnapshotSettingsServiceClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/__init__.py new file mode 100644 index 000000000000..abdfcfa96e45 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SnapshotSettingsServiceTransport +from .rest import ( + SnapshotSettingsServiceRestInterceptor, + SnapshotSettingsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SnapshotSettingsServiceTransport]] +_transport_registry["rest"] = SnapshotSettingsServiceRestTransport + +__all__ = ( + "SnapshotSettingsServiceTransport", + "SnapshotSettingsServiceRestTransport", + "SnapshotSettingsServiceRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/base.py new file mode 100644 index 000000000000..14850b054f4a --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/base.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
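A minimal usage sketch of the new snapshot-settings surface added by this patch (the project ID is a placeholder and the settings fields are left for the caller to populate; ``patch()`` returns an extended operation whose ``result()`` blocks until the underlying global operation finishes):

.. code-block:: python

    from google.cloud import compute_v1

    client = compute_v1.SnapshotSettingsServiceClient()

    # Read the project-level snapshot settings (wraps the Get call shown above).
    settings = client.get(project="my-project")
    print(settings)

    # Update the settings and wait for the returned global operation to complete.
    op = client.patch(
        project="my-project",
        snapshot_settings_resource=compute_v1.SnapshotSettings(),  # populate fields as needed
    )
    op.result(timeout=300)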
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1 import gapic_version as package_version
+from google.cloud.compute_v1.services import global_operations
+from google.cloud.compute_v1.types import compute
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class SnapshotSettingsServiceTransport(abc.ABC):
+    """Abstract transport class for SnapshotSettingsService."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/compute",
+        "https://www.googleapis.com/auth/cloud-platform",
+    )
+
+    DEFAULT_HOST: str = "compute.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+        self._extended_operations_services: Dict[str, Any] = {}
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetSnapshotSettingRequest], + Union[compute.SnapshotSettings, Awaitable[compute.SnapshotSettings]], + ]: + raise NotImplementedError() + + @property + def patch( + self, + ) -> Callable[ + [compute.PatchSnapshotSettingRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("SnapshotSettingsServiceTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py new file mode 100644 index 000000000000..7397372bae83 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
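The ``_prep_wrapped_messages`` hook above is what attaches retry and timeout handling to each transport method; a rough sketch of overriding those defaults at call time (the retry values are illustrative, not recommendations):

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import compute_v1

    client = compute_v1.SnapshotSettingsServiceClient()

    # retry/timeout are forwarded to the wrapped callable looked up in
    # _wrapped_methods, exactly as SnapshotSettingsServiceClient.get() does.
    settings = client.get(
        project="my-project",
        retry=retries.Retry(initial=1.0, maximum=10.0, timeout=60.0),
        timeout=30.0,
    )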
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SnapshotSettingsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SnapshotSettingsServiceRestInterceptor: + """Interceptor for SnapshotSettingsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SnapshotSettingsServiceRestTransport. + + .. code-block:: python + class MyCustomSnapshotSettingsServiceInterceptor(SnapshotSettingsServiceRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SnapshotSettingsServiceRestTransport(interceptor=MyCustomSnapshotSettingsServiceInterceptor()) + client = SnapshotSettingsServiceClient(transport=transport) + + + """ + + def pre_get( + self, + request: compute.GetSnapshotSettingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetSnapshotSettingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotSettingsService server. + """ + return request, metadata + + def post_get(self, response: compute.SnapshotSettings) -> compute.SnapshotSettings: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the SnapshotSettingsService server but before + it is returned to user code. + """ + return response + + def pre_patch( + self, + request: compute.PatchSnapshotSettingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.PatchSnapshotSettingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotSettingsService server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the SnapshotSettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SnapshotSettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SnapshotSettingsServiceRestInterceptor + + +class SnapshotSettingsServiceRestTransport(SnapshotSettingsServiceTransport): + """REST backend transport for SnapshotSettingsService. + + The SnapshotSettings API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SnapshotSettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SnapshotSettingsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Get(SnapshotSettingsServiceRestStub):
+        def __hash__(self):
+            return hash("Get")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: compute.GetSnapshotSettingRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> compute.SnapshotSettings:
+            r"""Call the get method over HTTP.
+
+            Args:
+                request (~.compute.GetSnapshotSettingRequest):
+                    The request object. A request message for
+                    SnapshotSettingsService.Get. See the
+                    method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.SnapshotSettings:
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/compute/v1/projects/{project}/global/snapshotSettings",
+                },
+            ]
+            request, metadata = self._interceptor.pre_get(request, metadata)
+            pb_request = compute.GetSnapshotSettingRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=False,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SnapshotSettings() + pb_resp = compute.SnapshotSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Patch(SnapshotSettingsServiceRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.PatchSnapshotSettingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchSnapshotSettingRequest): + The request object. A request message for + SnapshotSettingsService.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/snapshotSettings", + "body": "snapshot_settings_resource", + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchSnapshotSettingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def get( + self, + ) -> Callable[[compute.GetSnapshotSettingRequest], compute.SnapshotSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch( + self, + ) -> Callable[[compute.PatchSnapshotSettingRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SnapshotSettingsServiceRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index 31cc20f17e5c..fec848613c4d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -891,29 +891,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - 
group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -1421,29 +1403,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py index 76c4a2426be4..68a535d5c0e3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py @@ -453,7 +453,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -648,29 +648,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -771,7 +753,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -972,29 +954,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1104,7 +1068,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index 627301b15b70..e4072ba4c723 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -862,22 +862,26 @@ def sample_get(): Returns: google.cloud.compute_v1.types.SslCertificate: - Represents an SSL Certificate resource. Google Compute - Engine has two SSL Certificate resources: \* + Represents an SSL certificate resource. 
Google Compute + Engine has two SSL certificate resources: \* [Global](/compute/docs/reference/rest/v1/sslCertificates) \* [Regional](/compute/docs/reference/rest/v1/regionSslCertificates) - The sslCertificates are used by: - external HTTPS load - balancers - SSL proxy load balancers The - regionSslCertificates are used by internal HTTPS load - balancers. Optionally, certificate file contents that - you upload can contain a set of up to five PEM-encoded - certificates. The API call creates an object - (sslCertificate) that holds this data. You can use SSL - keys and certificates to secure connections to a load - balancer. For more information, read Creating and using - SSL certificates, SSL certificates quotas and limits, - and Troubleshooting SSL certificates. + The global SSL certificates (sslCertificates) are used + by: - Global external Application Load Balancers - + Classic Application Load Balancers - Proxy Network Load + Balancers (with target SSL proxies) The regional SSL + certificates (regionSslCertificates) are used by: - + Regional external Application Load Balancers - Regional + internal Application Load Balancers Optionally, + certificate file contents that you upload can contain a + set of up to five PEM-encoded certificates. The API call + creates an object (sslCertificate) that holds this data. + You can use SSL keys and certificates to secure + connections to a load balancer. For more information, + read Creating and using SSL certificates, SSL + certificates quotas and limits, and Troubleshooting SSL + certificates. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py index ca2cc569d646..0aa6efe1b1ca 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py @@ -458,7 +458,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -546,22 +546,26 @@ def __call__( Returns: ~.compute.SslCertificate: - Represents an SSL Certificate resource. Google Compute - Engine has two SSL Certificate resources: \* + Represents an SSL certificate resource. Google Compute + Engine has two SSL certificate resources: \* `Global `__ \* `Regional `__ - The sslCertificates are used by: - external HTTPS load - balancers - SSL proxy load balancers The - regionSslCertificates are used by internal HTTPS load - balancers. Optionally, certificate file contents that - you upload can contain a set of up to five PEM-encoded - certificates. The API call creates an object - (sslCertificate) that holds this data. You can use SSL - keys and certificates to secure connections to a load - balancer. For more information, read Creating and using - SSL certificates, SSL certificates quotas and limits, - and Troubleshooting SSL certificates. 
+ The global SSL certificates (sslCertificates) are used + by: - Global external Application Load Balancers - + Classic Application Load Balancers - Proxy Network Load + Balancers (with target SSL proxies) The regional SSL + certificates (regionSslCertificates) are used by: - + Regional external Application Load Balancers - Regional + internal Application Load Balancers Optionally, + certificate file contents that you upload can contain a + set of up to five PEM-encoded certificates. The API call + creates an object (sslCertificate) that holds this data. + You can use SSL keys and certificates to secure + connections to a load balancer. For more information, + read Creating and using SSL certificates, SSL + certificates quotas and limits, and Troubleshooting SSL + certificates. """ @@ -661,7 +665,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py index bed554ad66e8..5a1c3e03390b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py @@ -516,7 +516,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -709,7 +709,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -993,7 +993,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index 7081355d927a..b5b14f6848a5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -1367,29 +1367,11 @@ def sample_get_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ @@ -2371,29 +2353,11 @@ def sample_set_iam_policy(): learn which resources support conditions in their IAM policies, see the [IAM documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the [IAM + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM documentation](\ https://cloud.google.com/iam/docs/). """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py index b66e74402acf..681146a70a12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py @@ -667,7 +667,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -768,7 +768,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -977,29 +977,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1100,7 +1082,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1384,7 +1366,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1498,29 +1480,11 @@ def __call__( learn which resources support conditions in their IAM policies, see the `IAM documentation `__. 
- **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] }, { "role": - "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": - "expirable access", "description": "Does not grant - access after Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** - bindings: - members: - user:mike@example.com - - group:admins@example.com - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: - roles/resourcemanager.organizationViewer condition: - title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= - version: 3 For a description of IAM and its features, - see the `IAM + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM documentation `__. """ @@ -1631,7 +1595,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py index 4225bd2a0931..a93a6d892549 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py @@ -370,7 +370,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -564,7 +564,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -760,7 +760,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index 1f374db84841..2baf17857ee7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -867,14 +867,17 @@ def sample_get(): [Global](/compute/docs/reference/rest/v1/targetHttpProxies) \* [Regional](/compute/docs/reference/rest/v1/regionTargetHttpProxies) - A target HTTP proxy is a component of GCP HTTP load - balancers. \* targetHttpProxies are used by external - HTTP load balancers and Traffic Director. \* - regionTargetHttpProxies are used by internal HTTP load - balancers. Forwarding rules reference a target HTTP - proxy, and the target proxy then references a URL map. - For more information, read Using Target Proxies and - Forwarding rule concepts. + A target HTTP proxy is a component of Google Cloud HTTP + load balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTP proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py index 4ebfa888a0ce..022f2b639b63 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py @@ -518,7 +518,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -611,14 +611,17 @@ def __call__( `Global `__ \* `Regional `__ - A target HTTP proxy is a component of GCP HTTP load - balancers. \* targetHttpProxies are used by external - HTTP load balancers and Traffic Director. \* - regionTargetHttpProxies are used by internal HTTP load - balancers. Forwarding rules reference a target HTTP - proxy, and the target proxy then references a URL map. - For more information, read Using Target Proxies and - Forwarding rule concepts. 
+ A target HTTP proxy is a component of Google Cloud HTTP + load balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTP proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ @@ -718,7 +721,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -914,7 +917,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1024,7 +1027,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index a80e2712fca9..c606acf643d7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -868,12 +868,16 @@ def sample_get(): \* [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) A target HTTPS proxy is a component of GCP HTTPS load - balancers. \* targetHttpsProxies are used by external - HTTPS load balancers. \* regionTargetHttpsProxies are - used by internal HTTPS load balancers. Forwarding rules - reference a target HTTPS proxy, and the target proxy - then references a URL map. For more information, read - Using Target Proxies and Forwarding rule concepts. + balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTPS proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ # Create or coerce a protobuf request object. 
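As a usage illustration of the Get call whose docstring is updated above, a minimal sketch (project and proxy names are hypothetical) that fetches a target HTTPS proxy and reads the URL map it references:

.. code-block:: python

    from google.cloud import compute_v1

    def describe_target_https_proxy(project: str, proxy_name: str) -> None:
        # Fetch the proxy and print the URL map it forwards requests to.
        client = compute_v1.TargetHttpsProxiesClient()
        proxy = client.get(project=project, target_https_proxy=proxy_name)
        print(f"{proxy.name} -> {proxy.url_map}")

    # Example invocation with placeholder identifiers:
    # describe_target_https_proxy("my-project", "my-https-proxy")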
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py index e30b5c87fa67..0a49ca2a5b4e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py @@ -644,7 +644,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -738,12 +738,16 @@ def __call__( \* `Regional `__ A target HTTPS proxy is a component of GCP HTTPS load - balancers. \* targetHttpsProxies are used by external - HTTPS load balancers. \* regionTargetHttpsProxies are - used by internal HTTPS load balancers. Forwarding rules - reference a target HTTPS proxy, and the target proxy - then references a URL map. For more information, read - Using Target Proxies and Forwarding rule concepts. + balancers. \* targetHttpProxies are used by global + external Application Load Balancers, classic Application + Load Balancers, cross-region internal Application Load + Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external + Application Load Balancers. Forwarding rules reference a + target HTTPS proxy, and the target proxy then references + a URL map. For more information, read Using Target + Proxies and Forwarding rule concepts. """ @@ -843,7 +847,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1041,7 +1045,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1151,7 +1155,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1263,7 +1267,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1375,7 +1379,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -1487,7 +1491,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1597,7 +1601,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index 04326226c592..e33608b80799 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -1363,6 +1363,327 @@ def sample_list(): # Done; return the response. return response + def set_security_policy_unary( + self, + request: Optional[ + Union[compute.SetSecurityPolicyTargetInstanceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + target_instance: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Google Cloud Armor security policy for the + specified target instance. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyTargetInstanceRequest, dict]): + The request object. A request message for + TargetInstances.SetSecurityPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance (str): + Name of the TargetInstance resource + to which the security policy should be + set. The name should conform to RFC1035. 
+ + This corresponds to the ``target_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, target_instance, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyTargetInstanceRequest): + request = compute.SetSecurityPolicyTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance is not None: + request.target_instance = target_instance + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("target_instance", request.target_instance), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_security_policy( + self, + request: Optional[ + Union[compute.SetSecurityPolicyTargetInstanceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + target_instance: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Google Cloud Armor security policy for the + specified target instance. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyTargetInstanceRequest, dict]): + The request object. A request message for + TargetInstances.SetSecurityPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance (str): + Name of the TargetInstance resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``target_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, target_instance, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyTargetInstanceRequest): + request = compute.SetSecurityPolicyTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance is not None: + request.target_instance = target_instance + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("target_instance", request.target_instance), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "TargetInstancesClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/base.py index 56bb767a52c6..c510a848e423 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/base.py @@ -152,6 +152,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_security_policy: gapic_v1.method.wrap_method( + self.set_security_policy, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -211,6 +216,15 @@ def list( ]: raise NotImplementedError() + @property + def set_security_policy( + self, + ) -> Callable[ + [compute.SetSecurityPolicyTargetInstanceRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py index 8ddb7090c79c..495519ad71a9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py @@ -103,6 +103,14 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_set_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + transport = TargetInstancesRestTransport(interceptor=MyCustomTargetInstancesInterceptor()) client = TargetInstancesClient(transport=transport) @@ -218,6 +226,31 @@ def post_list( """ return response + def pre_set_security_policy( + self, + request: compute.SetSecurityPolicyTargetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.SetSecurityPolicyTargetInstanceRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetInstances server. + """ + return request, metadata + + def post_set_security_policy( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the response + after it is returned by the TargetInstances server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TargetInstancesRestStub: @@ -458,7 +491,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -653,7 +686,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -801,6 +834,118 @@ def __call__( resp = self._interceptor.post_list(resp) return resp + class _SetSecurityPolicy(TargetInstancesRestStub): + def __hash__(self): + return hash("SetSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetSecurityPolicyTargetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the set security policy method over HTTP. + + Args: + request (~.compute.SetSecurityPolicyTargetInstanceRequest): + The request object. A request message for + TargetInstances.SetSecurityPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}/setSecurityPolicy", + "body": "security_policy_reference_resource", + }, + ] + request, metadata = self._interceptor.pre_set_security_policy( + request, metadata + ) + pb_request = compute.SetSecurityPolicyTargetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_security_policy(resp) + return resp + @property def aggregated_list( self, @@ -844,6 +989,14 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def set_security_policy( + self, + ) -> Callable[[compute.SetSecurityPolicyTargetInstanceRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index 358a6914a134..7e299f986128 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -3075,6 +3075,327 @@ def error_code(self): # Done; return the response. return response + def set_security_policy_unary( + self, + request: Optional[ + Union[compute.SetSecurityPolicyTargetPoolRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Google Cloud Armor security policy for the + specified target pool. For more information, see Google + Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.SetSecurityPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + which the security policy should be set. + The name should conform to RFC1035. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, target_pool, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyTargetPoolRequest): + request = compute.SetSecurityPolicyTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_security_policy( + self, + request: Optional[ + Union[compute.SetSecurityPolicyTargetPoolRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + security_policy_reference_resource: Optional[ + compute.SecurityPolicyReference + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Google Cloud Armor security policy for the + specified target pool. 
For more information, see Google + Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.SetSecurityPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + which the security policy should be set. + The name should conform to RFC1035. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, region, target_pool, security_policy_reference_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyTargetPoolRequest): + request = compute.SetSecurityPolicyTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = ( + security_policy_reference_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "TargetPoolsClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/base.py index 7c7480543a5b..0a5f7f657494 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/base.py @@ -182,6 +182,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_security_policy: gapic_v1.method.wrap_method( + self.set_security_policy, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -298,6 +303,15 @@ def set_backup( ]: raise NotImplementedError() + @property + def set_security_policy( + self, + ) -> Callable[ + [compute.SetSecurityPolicyTargetPoolRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py index 12bde1245f3a..07c2f0039639 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py @@ -151,6 +151,14 @@ def post_set_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_set_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + transport = TargetPoolsRestTransport(interceptor=MyCustomTargetPoolsInterceptor()) client = TargetPoolsClient(transport=transport) @@ -392,6 +400,29 @@ def post_set_backup(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_set_security_policy( + self, + request: compute.SetSecurityPolicyTargetPoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.SetSecurityPolicyTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_set_security_policy( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class TargetPoolsRestStub: @@ -546,7 +577,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -658,7 +689,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -854,7 +885,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1144,7 +1175,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1342,7 +1373,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1454,7 +1485,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1564,7 +1595,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1624,6 +1655,118 @@ def __call__( resp = self._interceptor.post_set_backup(resp) return resp + class _SetSecurityPolicy(TargetPoolsRestStub): + def __hash__(self): + return hash("SetSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetSecurityPolicyTargetPoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the set security policy method over HTTP. + + Args: + request (~.compute.SetSecurityPolicyTargetPoolRequest): + The request object. A request message for + TargetPools.SetSecurityPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. 
- For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setSecurityPolicy", + "body": "security_policy_reference_resource", + }, + ] + request, metadata = self._interceptor.pre_set_security_policy( + request, metadata + ) + pb_request = compute.SetSecurityPolicyTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_security_policy(resp) + return resp + @property def add_health_check( self, @@ -1710,6 +1853,14 @@ def set_backup( # In C++ this would require a dynamic_cast return self._SetBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def set_security_policy( + self, + ) -> Callable[[compute.SetSecurityPolicyTargetPoolRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py index 6450c5782e7a..b1c8d334c61e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py @@ -498,7 +498,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -693,7 +693,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -891,7 +891,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1003,7 +1003,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1115,7 +1115,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1227,7 +1227,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1339,7 +1339,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py index 4c40322c2c35..494464de76c2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py @@ -522,7 +522,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -717,7 +717,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -915,7 +915,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1027,7 +1027,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. 
- For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py index de8de33c6b63..75c1369a07d1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py @@ -489,7 +489,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -681,7 +681,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -879,7 +879,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index 95f64b4b763c..15b875bbeb01 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -861,19 +861,23 @@ def sample_get(): [Regional](/compute/docs/reference/rest/v1/regionUrlMaps) A URL map resource is a component of certain types of cloud load balancers and Traffic Director: \* urlMaps - are used by external HTTP(S) load balancers and Traffic - Director. \* regionUrlMaps are used by internal HTTP(S) - load balancers. For a list of supported URL map features - by the load balancer type, see the Load balancing - features: Routing and traffic management table. For a - list of supported URL map features for Traffic Director, - see the Traffic Director features: Routing and traffic - management table. This resource defines mappings from - hostnames and URL paths to either a backend service or a - backend bucket. To use the global urlMaps resource, the - backend service must have a loadBalancingScheme of - either EXTERNAL or INTERNAL_SELF_MANAGED. To use the - regionUrlMaps resource, the backend service must have a + are used by global external Application Load Balancers, + classic Application Load Balancers, and cross-region + internal Application Load Balancers. \* regionUrlMaps + are used by internal Application Load Balancers, + regional external Application Load Balancers and + regional internal Application Load Balancers. 
For a list + of supported URL map features by the load balancer type, + see the Load balancing features: Routing and traffic + management table. For a list of supported URL map + features for Traffic Director, see the Traffic Director + features: Routing and traffic management table. This + resource defines mappings from hostnames and URL paths + to either a backend service or a backend bucket. To use + the global urlMaps resource, the backend service must + have a loadBalancingScheme of either EXTERNAL or + INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py index ba76e8c1c9e4..16f7829058ff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py @@ -561,7 +561,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -654,19 +654,23 @@ def __call__( `Regional `__ A URL map resource is a component of certain types of cloud load balancers and Traffic Director: \* urlMaps - are used by external HTTP(S) load balancers and Traffic - Director. \* regionUrlMaps are used by internal HTTP(S) - load balancers. For a list of supported URL map features - by the load balancer type, see the Load balancing - features: Routing and traffic management table. For a - list of supported URL map features for Traffic Director, - see the Traffic Director features: Routing and traffic - management table. This resource defines mappings from - hostnames and URL paths to either a backend service or a - backend bucket. To use the global urlMaps resource, the - backend service must have a loadBalancingScheme of - either EXTERNAL or INTERNAL_SELF_MANAGED. To use the - regionUrlMaps resource, the backend service must have a + are used by global external Application Load Balancers, + classic Application Load Balancers, and cross-region + internal Application Load Balancers. \* regionUrlMaps + are used by internal Application Load Balancers, + regional external Application Load Balancers and + regional internal Application Load Balancers. For a list + of supported URL map features by the load balancer type, + see the Load balancing features: Routing and traffic + management table. For a list of supported URL map + features for Traffic Director, see the Traffic Director + features: Routing and traffic management table. This + resource defines mappings from hostnames and URL paths + to either a backend service or a backend bucket. To use + the global urlMaps resource, the backend service must + have a loadBalancingScheme of either EXTERNAL or + INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of INTERNAL_MANAGED. For more information, read URL Map Concepts. @@ -767,7 +771,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. 
- For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -877,7 +881,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1073,7 +1077,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1182,7 +1186,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py index caedeaf0510c..915b278efdfc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py @@ -545,7 +545,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -827,7 +827,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -1025,7 +1025,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py index 116843f9b342..39b344310804 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py @@ -483,7 +483,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" @@ -672,7 +672,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -870,7 +870,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index e1256cc57abd..47485e5c1ea1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -634,7 +634,7 @@ def sample_get(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -914,7 +914,7 @@ def sample_wait(): - For global operations, use the globalOperations resource. - For regional operations, use the regionOperations resource. - For zonal operations, use - the zonalOperations resource. For more information, read + the zoneOperations resource. For more information, read Global, Regional, and Zonal Resources. """ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py index 9275f30261b1..77139bd55a12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py @@ -430,7 +430,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. """ @@ -619,7 +619,7 @@ def __call__( - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, - use the ``zonalOperations`` resource. For more + use the ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. 
""" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index e310d187d528..0eadc25e7caf 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -42,6 +42,7 @@ AddRuleFirewallPolicyRequest, AddRuleNetworkFirewallPolicyRequest, AddRuleRegionNetworkFirewallPolicyRequest, + AddRuleRegionSecurityPolicyRequest, AddRuleSecurityPolicyRequest, AddSignedUrlKeyBackendBucketRequest, AddSignedUrlKeyBackendServiceRequest, @@ -94,6 +95,8 @@ AllocationSpecificSKUAllocationReservedInstanceProperties, AllocationSpecificSKUReservation, Allowed, + AnnouncePublicAdvertisedPrefixeRequest, + AnnouncePublicDelegatedPrefixeRequest, ApplyUpdatesToInstancesInstanceGroupManagerRequest, ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, AttachDiskInstanceRequest, @@ -101,6 +104,7 @@ AttachedDiskInitializeParams, AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, AttachNetworkEndpointsNetworkEndpointGroupRequest, + AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, AuditConfig, AuditLogConfig, AuthorizationLoggingOptions, @@ -115,6 +119,7 @@ AutoscalingPolicyLoadBalancingUtilization, AutoscalingPolicyScaleInControl, AutoscalingPolicyScalingSchedule, + AWSV4Signature, Backend, BackendBucket, BackendBucketCdnPolicy, @@ -132,12 +137,14 @@ BackendServiceGroupHealth, BackendServiceIAP, BackendServiceList, + BackendServiceListUsable, BackendServiceLocalityLoadBalancingPolicyConfig, BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy, BackendServiceLocalityLoadBalancingPolicyConfigPolicy, BackendServiceLogConfig, BackendServiceReference, BackendServicesScopedList, + BackendServiceUsedBy, BfdPacket, BfdStatus, BfdStatusPacketCounts, @@ -147,6 +154,7 @@ BulkInsertInstanceRequest, BulkInsertInstanceResource, BulkInsertInstanceResourcePerInstanceProperties, + BulkInsertOperationStatus, BulkInsertRegionDiskRequest, BulkInsertRegionInstanceRequest, CacheInvalidationRule, @@ -265,6 +273,7 @@ DetachDiskInstanceRequest, DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, DetachNetworkEndpointsNetworkEndpointGroupRequest, + DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, DisableXpnHostProjectRequest, DisableXpnResourceProjectRequest, Disk, @@ -353,6 +362,7 @@ GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, GetHealthTargetPoolRequest, + GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, GetIamPolicyDiskRequest, GetIamPolicyFirewallPolicyRequest, @@ -387,6 +397,8 @@ GetLicenseRequest, GetMachineImageRequest, GetMachineTypeRequest, + GetMacsecConfigInterconnectRequest, + GetNatIpInfoRouterRequest, GetNatMappingInfoRoutersRequest, GetNetworkAttachmentRequest, GetNetworkEdgeSecurityServiceRequest, @@ -430,6 +442,7 @@ GetRuleFirewallPolicyRequest, GetRuleNetworkFirewallPolicyRequest, GetRuleRegionNetworkFirewallPolicyRequest, + GetRuleRegionSecurityPolicyRequest, GetRuleSecurityPolicyRequest, GetScreenshotInstanceRequest, GetSecurityPolicyRequest, @@ -437,6 +450,7 @@ GetServiceAttachmentRequest, GetShieldedInstanceIdentityInstanceRequest, GetSnapshotRequest, + GetSnapshotSettingRequest, GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, @@ -621,6 +635,7 @@ InstanceProperties, InstanceReference, InstancesAddResourcePoliciesRequest, + InstancesBulkInsertOperationMetadata, InstancesGetEffectiveFirewallsResponse, 
InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -630,6 +645,7 @@ InstancesSetMachineTypeRequest, InstancesSetMinCpuPlatformRequest, InstancesSetNameRequest, + InstancesSetSecurityPolicyRequest, InstancesSetServiceAccountRequest, InstancesStartWithEncryptionKeyRequest, InstanceTemplate, @@ -653,10 +669,15 @@ InterconnectDiagnosticsLinkLACPStatus, InterconnectDiagnosticsLinkOpticalPower, InterconnectDiagnosticsLinkStatus, + InterconnectDiagnosticsMacsecStatus, InterconnectList, InterconnectLocation, InterconnectLocationList, InterconnectLocationRegionInfo, + InterconnectMacsec, + InterconnectMacsecConfig, + InterconnectMacsecConfigPreSharedKey, + InterconnectMacsecPreSharedKey, InterconnectOutageNotification, InterconnectRemoteLocation, InterconnectRemoteLocationConstraints, @@ -664,6 +685,7 @@ InterconnectRemoteLocationList, InterconnectRemoteLocationPermittedConnections, InterconnectsGetDiagnosticsResponse, + InterconnectsGetMacsecConfigResponse, InvalidateCacheUrlMapRequest, Items, License, @@ -715,6 +737,7 @@ ListNetworkEndpointGroupsRequest, ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, ListNetworkEndpointsNetworkEndpointGroupsRequest, + ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ListNetworkFirewallPoliciesRequest, ListNetworksRequest, ListNodeGroupsRequest, @@ -770,6 +793,8 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendServicesRequest, + ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, ListVpnTunnelsRequest, @@ -805,6 +830,9 @@ MoveGlobalAddressRequest, MoveInstanceProjectRequest, NamedPort, + NatIpInfo, + NatIpInfoNatIpInfoMapping, + NatIpInfoResponse, Network, NetworkAttachment, NetworkAttachmentAggregatedList, @@ -891,6 +919,7 @@ PatchInstanceGroupManagerRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, + PatchNetworkAttachmentRequest, PatchNetworkEdgeSecurityServiceRequest, PatchNetworkFirewallPolicyRequest, PatchNetworkRequest, @@ -915,9 +944,11 @@ PatchRuleFirewallPolicyRequest, PatchRuleNetworkFirewallPolicyRequest, PatchRuleRegionNetworkFirewallPolicyRequest, + PatchRuleRegionSecurityPolicyRequest, PatchRuleSecurityPolicyRequest, PatchSecurityPolicyRequest, PatchServiceAttachmentRequest, + PatchSnapshotSettingRequest, PatchSslPolicyRequest, PatchSubnetworkRequest, PatchTargetGrpcProxyRequest, @@ -931,6 +962,8 @@ PreconfiguredWafSet, PreservedState, PreservedStatePreservedDisk, + PreservedStatePreservedNetworkIp, + PreservedStatePreservedNetworkIpIpAddress, PreviewRouterRequest, Project, ProjectsDisableXpnResourceRequest, @@ -979,6 +1012,8 @@ RegionInstanceGroupsListInstancesRequest, RegionInstanceGroupsSetNamedPortsRequest, RegionList, + RegionNetworkEndpointGroupsAttachEndpointsRequest, + RegionNetworkEndpointGroupsDetachEndpointsRequest, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, @@ -998,6 +1033,7 @@ RemoveRuleFirewallPolicyRequest, RemoveRuleNetworkFirewallPolicyRequest, RemoveRuleRegionNetworkFirewallPolicyRequest, + RemoveRuleRegionSecurityPolicyRequest, RemoveRuleSecurityPolicyRequest, RequestMirrorPolicy, Reservation, @@ -1074,6 +1110,7 @@ SecurityPolicy, SecurityPolicyAdaptiveProtectionConfig, SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig, 
SecurityPolicyAdvancedOptionsConfig, SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, SecurityPolicyDdosProtectionConfig, @@ -1085,6 +1122,8 @@ SecurityPolicyRuleHttpHeaderActionHttpHeaderOption, SecurityPolicyRuleMatcher, SecurityPolicyRuleMatcherConfig, + SecurityPolicyRuleNetworkMatcher, + SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch, SecurityPolicyRulePreconfiguredWafConfig, SecurityPolicyRulePreconfiguredWafConfigExclusion, SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams, @@ -1092,6 +1131,7 @@ SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig, SecurityPolicyRuleRateLimitOptionsThreshold, SecurityPolicyRuleRedirectOptions, + SecurityPolicyUserDefinedField, SecuritySettings, SendDiagnosticInterruptInstanceRequest, SendDiagnosticInterruptInstanceResponse, @@ -1109,12 +1149,15 @@ SetBackupTargetPoolRequest, SetCertificateMapTargetHttpsProxyRequest, SetCertificateMapTargetSslProxyRequest, + SetCommonInstanceMetadataOperationMetadata, + SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo, SetCommonInstanceMetadataProjectRequest, SetDefaultNetworkTierProjectRequest, SetDeletionProtectionInstanceRequest, SetDiskAutoDeleteInstanceRequest, SetEdgeSecurityPolicyBackendBucketRequest, SetEdgeSecurityPolicyBackendServiceRequest, + SetIamPolicyBackendBucketRequest, SetIamPolicyBackendServiceRequest, SetIamPolicyDiskRequest, SetIamPolicyFirewallPolicyRequest, @@ -1167,6 +1210,10 @@ SetQuicOverrideTargetHttpsProxyRequest, SetSchedulingInstanceRequest, SetSecurityPolicyBackendServiceRequest, + SetSecurityPolicyInstanceRequest, + SetSecurityPolicyRegionBackendServiceRequest, + SetSecurityPolicyTargetInstanceRequest, + SetSecurityPolicyTargetPoolRequest, SetServiceAccountInstanceRequest, SetShieldedInstanceIntegrityPolicyInstanceRequest, SetSslCertificatesRegionTargetHttpsProxyRequest, @@ -1195,6 +1242,9 @@ SimulateMaintenanceEventNodeGroupRequest, Snapshot, SnapshotList, + SnapshotSettings, + SnapshotSettingsStorageLocationSettings, + SnapshotSettingsStorageLocationSettingsStorageLocationPreference, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -1218,6 +1268,8 @@ StatefulPolicy, StatefulPolicyPreservedState, StatefulPolicyPreservedStateDiskDevice, + StatefulPolicyPreservedStateNetworkIp, + Status, StopAsyncReplicationDiskRequest, StopAsyncReplicationRegionDiskRequest, StopGroupAsyncReplicationDiskRequest, @@ -1280,6 +1332,8 @@ TargetVpnGatewaysScopedList, TCPHealthCheck, TestFailure, + TestIamPermissionsBackendBucketRequest, + TestIamPermissionsBackendServiceRequest, TestIamPermissionsDiskRequest, TestIamPermissionsExternalVpnGatewayRequest, TestIamPermissionsFirewallPolicyRequest, @@ -1295,6 +1349,7 @@ TestIamPermissionsNodeGroupRequest, TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, + TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionDiskRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsReservationRequest, @@ -1306,6 +1361,7 @@ TestPermissionsRequest, TestPermissionsResponse, Uint128, + UpcomingMaintenance, UpdateAccessConfigInstanceRequest, UpdateAutoscalerRequest, UpdateBackendBucketRequest, @@ -1372,6 +1428,8 @@ Warning, Warnings, WeightedBackendService, + WithdrawPublicAdvertisedPrefixeRequest, + WithdrawPublicDelegatedPrefixeRequest, XpnHostList, XpnResourceId, Zone, @@ -1409,6 +1467,7 @@ "AddRuleFirewallPolicyRequest", "AddRuleNetworkFirewallPolicyRequest", "AddRuleRegionNetworkFirewallPolicyRequest", + 
"AddRuleRegionSecurityPolicyRequest", "AddRuleSecurityPolicyRequest", "AddSignedUrlKeyBackendBucketRequest", "AddSignedUrlKeyBackendServiceRequest", @@ -1461,6 +1520,8 @@ "AllocationSpecificSKUAllocationReservedInstanceProperties", "AllocationSpecificSKUReservation", "Allowed", + "AnnouncePublicAdvertisedPrefixeRequest", + "AnnouncePublicDelegatedPrefixeRequest", "ApplyUpdatesToInstancesInstanceGroupManagerRequest", "ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest", "AttachDiskInstanceRequest", @@ -1468,6 +1529,7 @@ "AttachedDiskInitializeParams", "AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "AttachNetworkEndpointsNetworkEndpointGroupRequest", + "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AuditConfig", "AuditLogConfig", "AuthorizationLoggingOptions", @@ -1482,6 +1544,7 @@ "AutoscalingPolicyLoadBalancingUtilization", "AutoscalingPolicyScaleInControl", "AutoscalingPolicyScalingSchedule", + "AWSV4Signature", "Backend", "BackendBucket", "BackendBucketCdnPolicy", @@ -1499,12 +1562,14 @@ "BackendServiceGroupHealth", "BackendServiceIAP", "BackendServiceList", + "BackendServiceListUsable", "BackendServiceLocalityLoadBalancingPolicyConfig", "BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy", "BackendServiceLocalityLoadBalancingPolicyConfigPolicy", "BackendServiceLogConfig", "BackendServiceReference", "BackendServicesScopedList", + "BackendServiceUsedBy", "BfdPacket", "BfdStatus", "BfdStatusPacketCounts", @@ -1514,6 +1579,7 @@ "BulkInsertInstanceRequest", "BulkInsertInstanceResource", "BulkInsertInstanceResourcePerInstanceProperties", + "BulkInsertOperationStatus", "BulkInsertRegionDiskRequest", "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", @@ -1632,6 +1698,7 @@ "DetachDiskInstanceRequest", "DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "DetachNetworkEndpointsNetworkEndpointGroupRequest", + "DetachNetworkEndpointsRegionNetworkEndpointGroupRequest", "DisableXpnHostProjectRequest", "DisableXpnResourceProjectRequest", "Disk", @@ -1720,6 +1787,7 @@ "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", "GetHealthTargetPoolRequest", + "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", "GetIamPolicyDiskRequest", "GetIamPolicyFirewallPolicyRequest", @@ -1754,6 +1822,8 @@ "GetLicenseRequest", "GetMachineImageRequest", "GetMachineTypeRequest", + "GetMacsecConfigInterconnectRequest", + "GetNatIpInfoRouterRequest", "GetNatMappingInfoRoutersRequest", "GetNetworkAttachmentRequest", "GetNetworkEdgeSecurityServiceRequest", @@ -1797,6 +1867,7 @@ "GetRuleFirewallPolicyRequest", "GetRuleNetworkFirewallPolicyRequest", "GetRuleRegionNetworkFirewallPolicyRequest", + "GetRuleRegionSecurityPolicyRequest", "GetRuleSecurityPolicyRequest", "GetScreenshotInstanceRequest", "GetSecurityPolicyRequest", @@ -1804,6 +1875,7 @@ "GetServiceAttachmentRequest", "GetShieldedInstanceIdentityInstanceRequest", "GetSnapshotRequest", + "GetSnapshotSettingRequest", "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", @@ -1988,6 +2060,7 @@ "InstanceProperties", "InstanceReference", "InstancesAddResourcePoliciesRequest", + "InstancesBulkInsertOperationMetadata", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "InstancesRemoveResourcePoliciesRequest", @@ -1997,6 +2070,7 @@ "InstancesSetMachineTypeRequest", "InstancesSetMinCpuPlatformRequest", "InstancesSetNameRequest", + "InstancesSetSecurityPolicyRequest", "InstancesSetServiceAccountRequest", 
"InstancesStartWithEncryptionKeyRequest", "InstanceTemplate", @@ -2020,10 +2094,15 @@ "InterconnectDiagnosticsLinkLACPStatus", "InterconnectDiagnosticsLinkOpticalPower", "InterconnectDiagnosticsLinkStatus", + "InterconnectDiagnosticsMacsecStatus", "InterconnectList", "InterconnectLocation", "InterconnectLocationList", "InterconnectLocationRegionInfo", + "InterconnectMacsec", + "InterconnectMacsecConfig", + "InterconnectMacsecConfigPreSharedKey", + "InterconnectMacsecPreSharedKey", "InterconnectOutageNotification", "InterconnectRemoteLocation", "InterconnectRemoteLocationConstraints", @@ -2031,6 +2110,7 @@ "InterconnectRemoteLocationList", "InterconnectRemoteLocationPermittedConnections", "InterconnectsGetDiagnosticsResponse", + "InterconnectsGetMacsecConfigResponse", "InvalidateCacheUrlMapRequest", "Items", "License", @@ -2082,6 +2162,7 @@ "ListNetworkEndpointGroupsRequest", "ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest", "ListNetworkEndpointsNetworkEndpointGroupsRequest", + "ListNetworkEndpointsRegionNetworkEndpointGroupsRequest", "ListNetworkFirewallPoliciesRequest", "ListNetworksRequest", "ListNodeGroupsRequest", @@ -2137,6 +2218,8 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendServicesRequest", + "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", "ListVpnTunnelsRequest", @@ -2172,6 +2255,9 @@ "MoveGlobalAddressRequest", "MoveInstanceProjectRequest", "NamedPort", + "NatIpInfo", + "NatIpInfoNatIpInfoMapping", + "NatIpInfoResponse", "Network", "NetworkAttachment", "NetworkAttachmentAggregatedList", @@ -2258,6 +2344,7 @@ "PatchInstanceGroupManagerRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", + "PatchNetworkAttachmentRequest", "PatchNetworkEdgeSecurityServiceRequest", "PatchNetworkFirewallPolicyRequest", "PatchNetworkRequest", @@ -2282,9 +2369,11 @@ "PatchRuleFirewallPolicyRequest", "PatchRuleNetworkFirewallPolicyRequest", "PatchRuleRegionNetworkFirewallPolicyRequest", + "PatchRuleRegionSecurityPolicyRequest", "PatchRuleSecurityPolicyRequest", "PatchSecurityPolicyRequest", "PatchServiceAttachmentRequest", + "PatchSnapshotSettingRequest", "PatchSslPolicyRequest", "PatchSubnetworkRequest", "PatchTargetGrpcProxyRequest", @@ -2298,6 +2387,8 @@ "PreconfiguredWafSet", "PreservedState", "PreservedStatePreservedDisk", + "PreservedStatePreservedNetworkIp", + "PreservedStatePreservedNetworkIpIpAddress", "PreviewRouterRequest", "Project", "ProjectsDisableXpnResourceRequest", @@ -2346,6 +2437,8 @@ "RegionInstanceGroupsListInstancesRequest", "RegionInstanceGroupsSetNamedPortsRequest", "RegionList", + "RegionNetworkEndpointGroupsAttachEndpointsRequest", + "RegionNetworkEndpointGroupsDetachEndpointsRequest", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", @@ -2365,6 +2458,7 @@ "RemoveRuleFirewallPolicyRequest", "RemoveRuleNetworkFirewallPolicyRequest", "RemoveRuleRegionNetworkFirewallPolicyRequest", + "RemoveRuleRegionSecurityPolicyRequest", "RemoveRuleSecurityPolicyRequest", "RequestMirrorPolicy", "Reservation", @@ -2441,6 +2535,7 @@ "SecurityPolicy", "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", 
"SecurityPolicyDdosProtectionConfig", @@ -2452,6 +2547,8 @@ "SecurityPolicyRuleHttpHeaderActionHttpHeaderOption", "SecurityPolicyRuleMatcher", "SecurityPolicyRuleMatcherConfig", + "SecurityPolicyRuleNetworkMatcher", + "SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch", "SecurityPolicyRulePreconfiguredWafConfig", "SecurityPolicyRulePreconfiguredWafConfigExclusion", "SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams", @@ -2459,6 +2556,7 @@ "SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig", "SecurityPolicyRuleRateLimitOptionsThreshold", "SecurityPolicyRuleRedirectOptions", + "SecurityPolicyUserDefinedField", "SecuritySettings", "SendDiagnosticInterruptInstanceRequest", "SendDiagnosticInterruptInstanceResponse", @@ -2476,12 +2574,15 @@ "SetBackupTargetPoolRequest", "SetCertificateMapTargetHttpsProxyRequest", "SetCertificateMapTargetSslProxyRequest", + "SetCommonInstanceMetadataOperationMetadata", + "SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo", "SetCommonInstanceMetadataProjectRequest", "SetDefaultNetworkTierProjectRequest", "SetDeletionProtectionInstanceRequest", "SetDiskAutoDeleteInstanceRequest", "SetEdgeSecurityPolicyBackendBucketRequest", "SetEdgeSecurityPolicyBackendServiceRequest", + "SetIamPolicyBackendBucketRequest", "SetIamPolicyBackendServiceRequest", "SetIamPolicyDiskRequest", "SetIamPolicyFirewallPolicyRequest", @@ -2534,6 +2635,10 @@ "SetQuicOverrideTargetHttpsProxyRequest", "SetSchedulingInstanceRequest", "SetSecurityPolicyBackendServiceRequest", + "SetSecurityPolicyInstanceRequest", + "SetSecurityPolicyRegionBackendServiceRequest", + "SetSecurityPolicyTargetInstanceRequest", + "SetSecurityPolicyTargetPoolRequest", "SetServiceAccountInstanceRequest", "SetShieldedInstanceIntegrityPolicyInstanceRequest", "SetSslCertificatesRegionTargetHttpsProxyRequest", @@ -2562,6 +2667,9 @@ "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", "SnapshotList", + "SnapshotSettings", + "SnapshotSettingsStorageLocationSettings", + "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -2585,6 +2693,8 @@ "StatefulPolicy", "StatefulPolicyPreservedState", "StatefulPolicyPreservedStateDiskDevice", + "StatefulPolicyPreservedStateNetworkIp", + "Status", "StopAsyncReplicationDiskRequest", "StopAsyncReplicationRegionDiskRequest", "StopGroupAsyncReplicationDiskRequest", @@ -2647,6 +2757,8 @@ "TargetVpnGatewaysScopedList", "TCPHealthCheck", "TestFailure", + "TestIamPermissionsBackendBucketRequest", + "TestIamPermissionsBackendServiceRequest", "TestIamPermissionsDiskRequest", "TestIamPermissionsExternalVpnGatewayRequest", "TestIamPermissionsFirewallPolicyRequest", @@ -2662,6 +2774,7 @@ "TestIamPermissionsNodeGroupRequest", "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", + "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionDiskRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsReservationRequest", @@ -2673,6 +2786,7 @@ "TestPermissionsRequest", "TestPermissionsResponse", "Uint128", + "UpcomingMaintenance", "UpdateAccessConfigInstanceRequest", "UpdateAutoscalerRequest", "UpdateBackendBucketRequest", @@ -2739,6 +2853,8 @@ "Warning", "Warnings", "WeightedBackendService", + "WithdrawPublicAdvertisedPrefixeRequest", + "WithdrawPublicDelegatedPrefixeRequest", "XpnHostList", "XpnResourceId", "Zone", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index 655924e2b587..7feeb9834d66 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -17,11 +17,13 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import any_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.compute.v1", manifest={ + "AWSV4Signature", "AbandonInstancesInstanceGroupManagerRequest", "AbandonInstancesRegionInstanceGroupManagerRequest", "AcceleratorConfig", @@ -46,6 +48,7 @@ "AddRuleFirewallPolicyRequest", "AddRuleNetworkFirewallPolicyRequest", "AddRuleRegionNetworkFirewallPolicyRequest", + "AddRuleRegionSecurityPolicyRequest", "AddRuleSecurityPolicyRequest", "AddSignedUrlKeyBackendBucketRequest", "AddSignedUrlKeyBackendServiceRequest", @@ -102,11 +105,14 @@ "AllocationSpecificSKUAllocationReservedInstanceProperties", "AllocationSpecificSKUReservation", "Allowed", + "AnnouncePublicAdvertisedPrefixeRequest", + "AnnouncePublicDelegatedPrefixeRequest", "ApplyUpdatesToInstancesInstanceGroupManagerRequest", "ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest", "AttachDiskInstanceRequest", "AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "AttachNetworkEndpointsNetworkEndpointGroupRequest", + "AttachNetworkEndpointsRegionNetworkEndpointGroupRequest", "AttachedDisk", "AttachedDiskInitializeParams", "AuditConfig", @@ -140,11 +146,13 @@ "BackendServiceGroupHealth", "BackendServiceIAP", "BackendServiceList", + "BackendServiceListUsable", "BackendServiceLocalityLoadBalancingPolicyConfig", "BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy", "BackendServiceLocalityLoadBalancingPolicyConfigPolicy", "BackendServiceLogConfig", "BackendServiceReference", + "BackendServiceUsedBy", "BackendServicesScopedList", "BfdPacket", "BfdStatus", @@ -155,6 +163,7 @@ "BulkInsertInstanceRequest", "BulkInsertInstanceResource", "BulkInsertInstanceResourcePerInstanceProperties", + "BulkInsertOperationStatus", "BulkInsertRegionDiskRequest", "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", @@ -273,6 +282,7 @@ "DetachDiskInstanceRequest", "DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest", "DetachNetworkEndpointsNetworkEndpointGroupRequest", + "DetachNetworkEndpointsRegionNetworkEndpointGroupRequest", "DisableXpnHostProjectRequest", "DisableXpnResourceProjectRequest", "Disk", @@ -362,6 +372,7 @@ "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", "GetHealthTargetPoolRequest", + "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", "GetIamPolicyDiskRequest", "GetIamPolicyFirewallPolicyRequest", @@ -396,6 +407,8 @@ "GetLicenseRequest", "GetMachineImageRequest", "GetMachineTypeRequest", + "GetMacsecConfigInterconnectRequest", + "GetNatIpInfoRouterRequest", "GetNatMappingInfoRoutersRequest", "GetNetworkAttachmentRequest", "GetNetworkEdgeSecurityServiceRequest", @@ -439,6 +452,7 @@ "GetRuleFirewallPolicyRequest", "GetRuleNetworkFirewallPolicyRequest", "GetRuleRegionNetworkFirewallPolicyRequest", + "GetRuleRegionSecurityPolicyRequest", "GetRuleSecurityPolicyRequest", "GetScreenshotInstanceRequest", "GetSecurityPolicyRequest", @@ -446,6 +460,7 @@ "GetServiceAttachmentRequest", "GetShieldedInstanceIdentityInstanceRequest", "GetSnapshotRequest", + "GetSnapshotSettingRequest", "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", @@ -634,6 +649,7 @@ 
"InstanceTemplatesScopedList", "InstanceWithNamedPorts", "InstancesAddResourcePoliciesRequest", + "InstancesBulkInsertOperationMetadata", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "InstancesRemoveResourcePoliciesRequest", @@ -643,6 +659,7 @@ "InstancesSetMachineTypeRequest", "InstancesSetMinCpuPlatformRequest", "InstancesSetNameRequest", + "InstancesSetSecurityPolicyRequest", "InstancesSetServiceAccountRequest", "InstancesStartWithEncryptionKeyRequest", "Int64RangeMatch", @@ -661,10 +678,15 @@ "InterconnectDiagnosticsLinkLACPStatus", "InterconnectDiagnosticsLinkOpticalPower", "InterconnectDiagnosticsLinkStatus", + "InterconnectDiagnosticsMacsecStatus", "InterconnectList", "InterconnectLocation", "InterconnectLocationList", "InterconnectLocationRegionInfo", + "InterconnectMacsec", + "InterconnectMacsecConfig", + "InterconnectMacsecConfigPreSharedKey", + "InterconnectMacsecPreSharedKey", "InterconnectOutageNotification", "InterconnectRemoteLocation", "InterconnectRemoteLocationConstraints", @@ -672,6 +694,7 @@ "InterconnectRemoteLocationList", "InterconnectRemoteLocationPermittedConnections", "InterconnectsGetDiagnosticsResponse", + "InterconnectsGetMacsecConfigResponse", "InvalidateCacheUrlMapRequest", "Items", "License", @@ -723,6 +746,7 @@ "ListNetworkEndpointGroupsRequest", "ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest", "ListNetworkEndpointsNetworkEndpointGroupsRequest", + "ListNetworkEndpointsRegionNetworkEndpointGroupsRequest", "ListNetworkFirewallPoliciesRequest", "ListNetworksRequest", "ListNodeGroupsRequest", @@ -778,6 +802,8 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendServicesRequest", + "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", "ListVpnTunnelsRequest", @@ -813,6 +839,9 @@ "MoveGlobalAddressRequest", "MoveInstanceProjectRequest", "NamedPort", + "NatIpInfo", + "NatIpInfoNatIpInfoMapping", + "NatIpInfoResponse", "Network", "NetworkAttachment", "NetworkAttachmentAggregatedList", @@ -899,6 +928,7 @@ "PatchInstanceGroupManagerRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", + "PatchNetworkAttachmentRequest", "PatchNetworkEdgeSecurityServiceRequest", "PatchNetworkFirewallPolicyRequest", "PatchNetworkRequest", @@ -923,9 +953,11 @@ "PatchRuleFirewallPolicyRequest", "PatchRuleNetworkFirewallPolicyRequest", "PatchRuleRegionNetworkFirewallPolicyRequest", + "PatchRuleRegionSecurityPolicyRequest", "PatchRuleSecurityPolicyRequest", "PatchSecurityPolicyRequest", "PatchServiceAttachmentRequest", + "PatchSnapshotSettingRequest", "PatchSslPolicyRequest", "PatchSubnetworkRequest", "PatchTargetGrpcProxyRequest", @@ -939,6 +971,8 @@ "PreconfiguredWafSet", "PreservedState", "PreservedStatePreservedDisk", + "PreservedStatePreservedNetworkIp", + "PreservedStatePreservedNetworkIpIpAddress", "PreviewRouterRequest", "Project", "ProjectsDisableXpnResourceRequest", @@ -987,6 +1021,8 @@ "RegionInstanceGroupsListInstancesRequest", "RegionInstanceGroupsSetNamedPortsRequest", "RegionList", + "RegionNetworkEndpointGroupsAttachEndpointsRequest", + "RegionNetworkEndpointGroupsDetachEndpointsRequest", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", @@ -1006,6 +1042,7 @@ "RemoveRuleFirewallPolicyRequest", "RemoveRuleNetworkFirewallPolicyRequest", 
"RemoveRuleRegionNetworkFirewallPolicyRequest", + "RemoveRuleRegionSecurityPolicyRequest", "RemoveRuleSecurityPolicyRequest", "RequestMirrorPolicy", "Reservation", @@ -1083,6 +1120,7 @@ "SecurityPolicy", "SecurityPolicyAdaptiveProtectionConfig", "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig", + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", "SecurityPolicyAdvancedOptionsConfig", "SecurityPolicyAdvancedOptionsConfigJsonCustomConfig", "SecurityPolicyDdosProtectionConfig", @@ -1094,6 +1132,8 @@ "SecurityPolicyRuleHttpHeaderActionHttpHeaderOption", "SecurityPolicyRuleMatcher", "SecurityPolicyRuleMatcherConfig", + "SecurityPolicyRuleNetworkMatcher", + "SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch", "SecurityPolicyRulePreconfiguredWafConfig", "SecurityPolicyRulePreconfiguredWafConfigExclusion", "SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams", @@ -1101,6 +1141,7 @@ "SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig", "SecurityPolicyRuleRateLimitOptionsThreshold", "SecurityPolicyRuleRedirectOptions", + "SecurityPolicyUserDefinedField", "SecuritySettings", "SendDiagnosticInterruptInstanceRequest", "SendDiagnosticInterruptInstanceResponse", @@ -1118,12 +1159,15 @@ "SetBackupTargetPoolRequest", "SetCertificateMapTargetHttpsProxyRequest", "SetCertificateMapTargetSslProxyRequest", + "SetCommonInstanceMetadataOperationMetadata", + "SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo", "SetCommonInstanceMetadataProjectRequest", "SetDefaultNetworkTierProjectRequest", "SetDeletionProtectionInstanceRequest", "SetDiskAutoDeleteInstanceRequest", "SetEdgeSecurityPolicyBackendBucketRequest", "SetEdgeSecurityPolicyBackendServiceRequest", + "SetIamPolicyBackendBucketRequest", "SetIamPolicyBackendServiceRequest", "SetIamPolicyDiskRequest", "SetIamPolicyFirewallPolicyRequest", @@ -1176,6 +1220,10 @@ "SetQuicOverrideTargetHttpsProxyRequest", "SetSchedulingInstanceRequest", "SetSecurityPolicyBackendServiceRequest", + "SetSecurityPolicyInstanceRequest", + "SetSecurityPolicyRegionBackendServiceRequest", + "SetSecurityPolicyTargetInstanceRequest", + "SetSecurityPolicyTargetPoolRequest", "SetServiceAccountInstanceRequest", "SetShieldedInstanceIntegrityPolicyInstanceRequest", "SetSslCertificatesRegionTargetHttpsProxyRequest", @@ -1204,6 +1252,9 @@ "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", "SnapshotList", + "SnapshotSettings", + "SnapshotSettingsStorageLocationSettings", + "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -1226,6 +1277,8 @@ "StatefulPolicy", "StatefulPolicyPreservedState", "StatefulPolicyPreservedStateDiskDevice", + "StatefulPolicyPreservedStateNetworkIp", + "Status", "StopAsyncReplicationDiskRequest", "StopAsyncReplicationRegionDiskRequest", "StopGroupAsyncReplicationDiskRequest", @@ -1288,6 +1341,8 @@ "TargetVpnGatewayList", "TargetVpnGatewaysScopedList", "TestFailure", + "TestIamPermissionsBackendBucketRequest", + "TestIamPermissionsBackendServiceRequest", "TestIamPermissionsDiskRequest", "TestIamPermissionsExternalVpnGatewayRequest", "TestIamPermissionsFirewallPolicyRequest", @@ -1303,6 +1358,7 @@ "TestIamPermissionsNodeGroupRequest", "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", + "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionDiskRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsReservationRequest", 
@@ -1314,6 +1370,7 @@ "TestPermissionsRequest", "TestPermissionsResponse", "Uint128", + "UpcomingMaintenance", "UpdateAccessConfigInstanceRequest", "UpdateAutoscalerRequest", "UpdateBackendBucketRequest", @@ -1380,6 +1437,8 @@ "Warning", "Warnings", "WeightedBackendService", + "WithdrawPublicAdvertisedPrefixeRequest", + "WithdrawPublicDelegatedPrefixeRequest", "XpnHostList", "XpnResourceId", "Zone", @@ -1390,10 +1449,72 @@ ) -class AbandonInstancesInstanceGroupManagerRequest(proto.Message): +class AWSV4Signature(proto.Message): r"""Messages - A request message for InstanceGroupManagers.AbandonInstances. + Contains the configurations necessary to generate a signature + for access to private storage buckets that support Signature + Version 4 for authentication. The service name for generating + the authentication header will always default to 's3'. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + access_key (str): + The access key used for s3 bucket + authentication. Required for updating or + creating a backend that uses AWS v4 signature + authentication, but will not be returned as part + of the configuration when queried with a REST + API GET request. @InputOnly + + This field is a member of `oneof`_ ``_access_key``. + access_key_id (str): + The identifier of an access key used for s3 + bucket authentication. + + This field is a member of `oneof`_ ``_access_key_id``. + access_key_version (str): + The optional version identifier for the + access key. You can use this to keep track of + different iterations of your access key. + + This field is a member of `oneof`_ ``_access_key_version``. + origin_region (str): + The name of the cloud region of your origin. + This is a free-form field with the name of the + region your cloud uses to host your origin. For + example, "us-east-1" for AWS or "us-ashburn-1" + for OCI. + + This field is a member of `oneof`_ ``_origin_region``. + """ + + access_key: str = proto.Field( + proto.STRING, + number=468922628, + optional=True, + ) + access_key_id: str = proto.Field( + proto.STRING, + number=292975158, + optional=True, + ) + access_key_version: str = proto.Field( + proto.STRING, + number=80261277, + optional=True, + ) + origin_region: str = proto.Field( + proto.STRING, + number=265305645, + optional=True, + ) + + +class AbandonInstancesInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.AbandonInstances. See the method description for details. @@ -1948,6 +2069,11 @@ class AccessConfig(proto.Message): first IP in associated external IPv6 range. This field is a member of `oneof`_ ``_public_ptr_domain_name``. + security_policy (str): + [Output Only] The resource URL for the security policy + associated with this access config. + + This field is a member of `oneof`_ ``_security_policy``. set_public_ptr (bool): Specifies whether a public DNS 'PTR' record should be created to map the external IP address @@ -2051,6 +2177,11 @@ class Type(proto.Enum): number=316599167, optional=True, ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) set_public_ptr: bool = proto.Field( proto.BOOL, number=523870229, @@ -3024,6 +3155,52 @@ class AddRuleRegionNetworkFirewallPolicyRequest(proto.Message): ) +class AddRuleRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.AddRule. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + security_policy (str): + Name of the security policy to update. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_rule_resource: "SecurityPolicyRule" = proto.Field( + proto.MESSAGE, + number=402693443, + message="SecurityPolicyRule", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + class AddRuleSecurityPolicyRequest(proto.Message): r"""A request message for SecurityPolicies.AddRule. See the method description for details. @@ -3890,16 +4067,15 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -3926,7 +4102,8 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -3978,6 +4155,9 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
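The AddRuleRegionSecurityPolicyRequest introduced above mirrors the global SecurityPolicies.AddRule request with an extra region field and an optional validate_only flag. A hedged sketch of how it might be exercised through the regenerated RegionSecurityPolicies client follows; every identifier (project, region, policy name, rule contents) is a placeholder, and add_rule is assumed to be the method name the GAPIC generator emits for this RPC.

# Sketch only; all identifiers below are placeholders, and add_rule is assumed
# to be the generated method for RegionSecurityPolicies.AddRule.
from google.cloud import compute_v1

request = compute_v1.AddRuleRegionSecurityPolicyRequest(
    project="my-project",
    region="us-central1",
    security_policy="my-regional-policy",
    security_policy_rule_resource=compute_v1.SecurityPolicyRule(
        priority=1000,
        action="deny(403)",
        match=compute_v1.SecurityPolicyRuleMatcher(
            versioned_expr="SRC_IPS_V1",
            config=compute_v1.SecurityPolicyRuleMatcherConfig(
                src_ip_ranges=["203.0.113.0/24"],
            ),
        ),
    ),
    validate_only=True,  # validate the rule without committing it
)

client = compute_v1.RegionSecurityPoliciesClient()
operation = client.add_rule(request=request)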
""" filter: str = proto.Field( @@ -4014,6 +4194,11 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListAddressesRequest(proto.Message): @@ -4029,16 +4214,15 @@ class AggregatedListAddressesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4065,7 +4249,8 @@ class AggregatedListAddressesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4117,6 +4302,9 @@ class AggregatedListAddressesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4153,6 +4341,11 @@ class AggregatedListAddressesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListAutoscalersRequest(proto.Message): @@ -4168,16 +4361,15 @@ class AggregatedListAutoscalersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4204,7 +4396,8 @@ class AggregatedListAutoscalersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4256,6 +4449,9 @@ class AggregatedListAutoscalersRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4292,6 +4488,11 @@ class AggregatedListAutoscalersRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListBackendServicesRequest(proto.Message): @@ -4307,16 +4508,15 @@ class AggregatedListBackendServicesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4343,7 +4543,8 @@ class AggregatedListBackendServicesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4395,6 +4596,9 @@ class AggregatedListBackendServicesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4431,6 +4635,11 @@ class AggregatedListBackendServicesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListDiskTypesRequest(proto.Message): @@ -4446,16 +4655,15 @@ class AggregatedListDiskTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4482,7 +4690,8 @@ class AggregatedListDiskTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4534,6 +4743,9 @@ class AggregatedListDiskTypesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
""" filter: str = proto.Field( @@ -4570,6 +4782,11 @@ class AggregatedListDiskTypesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListDisksRequest(proto.Message): @@ -4585,16 +4802,15 @@ class AggregatedListDisksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4621,7 +4837,8 @@ class AggregatedListDisksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4673,6 +4890,9 @@ class AggregatedListDisksRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4709,6 +4929,11 @@ class AggregatedListDisksRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListForwardingRulesRequest(proto.Message): @@ -4724,16 +4949,15 @@ class AggregatedListForwardingRulesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. 
The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4760,7 +4984,8 @@ class AggregatedListForwardingRulesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4812,6 +5037,9 @@ class AggregatedListForwardingRulesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4848,6 +5076,11 @@ class AggregatedListForwardingRulesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListGlobalOperationsRequest(proto.Message): @@ -4863,16 +5096,15 @@ class AggregatedListGlobalOperationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -4899,7 +5131,8 @@ class AggregatedListGlobalOperationsRequest(proto.Message): Google RE2 library syntax. 
The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -4951,6 +5184,9 @@ class AggregatedListGlobalOperationsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -4987,6 +5223,11 @@ class AggregatedListGlobalOperationsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListHealthChecksRequest(proto.Message): @@ -5002,16 +5243,15 @@ class AggregatedListHealthChecksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5038,7 +5278,8 @@ class AggregatedListHealthChecksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5090,6 +5331,9 @@ class AggregatedListHealthChecksRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
""" filter: str = proto.Field( @@ -5126,6 +5370,11 @@ class AggregatedListHealthChecksRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListInstanceGroupManagersRequest(proto.Message): @@ -5141,16 +5390,15 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5177,7 +5425,8 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5229,6 +5478,9 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -5265,6 +5517,11 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListInstanceGroupsRequest(proto.Message): @@ -5280,16 +5537,15 @@ class AggregatedListInstanceGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5316,7 +5572,8 @@ class AggregatedListInstanceGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5368,6 +5625,9 @@ class AggregatedListInstanceGroupsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -5404,6 +5664,11 @@ class AggregatedListInstanceGroupsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListInstanceTemplatesRequest(proto.Message): @@ -5419,16 +5684,15 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5455,7 +5719,8 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5507,6 +5772,9 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -5543,6 +5811,11 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListInstancesRequest(proto.Message): @@ -5558,16 +5831,15 @@ class AggregatedListInstancesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5594,7 +5866,8 @@ class AggregatedListInstancesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5646,6 +5919,9 @@ class AggregatedListInstancesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
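A sketch of the regular-expression filter style that the amended docstring describes, using the docstring's own ``name ne .*instance`` example. Per the added wording, this style cannot be mixed with AIP-160 expressions in one request and cannot combine constraints on multiple fields. ``InstancesClient.aggregated_list`` and the scoped-list iteration are assumed generated surfaces; ``my-project`` is a placeholder::

    from google.cloud import compute_v1

    client = compute_v1.InstancesClient()
    request = compute_v1.AggregatedListInstancesRequest(
        project="my-project",  # placeholder project ID
        # Regular-expression style: a single un-parenthesized ``ne`` expression.
        # Cannot be mixed with AIP-160 expressions in the same request, and
        # cannot combine constraints on multiple fields.
        filter="name ne .*instance",
    )
    for zone, scoped_list in client.aggregated_list(request=request):
        for instance in scoped_list.instances:
            print(zone, instance.name)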
""" filter: str = proto.Field( @@ -5682,6 +5958,11 @@ class AggregatedListInstancesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListInterconnectAttachmentsRequest(proto.Message): @@ -5697,16 +5978,15 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -5733,7 +6013,8 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -5785,145 +6066,9 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. - """ + service_project_number (int): - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - include_all_scopes: bool = proto.Field( - proto.BOOL, - number=391327988, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - - -class AggregatedListMachineTypesRequest(proto.Message): - r"""A request message for MachineTypes.AggregatedList. See the - method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. 
Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` - comparison can be used to test whether a key has been - defined. For example, to find all objects with ``owner`` - label use: ``labels.owner:*`` You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The - literal value is interpreted as a regular expression using - Google RE2 library syntax. The literal value must match the - entire field. For example, to filter for instances that do - not end with name "instance", you would use - ``name ne .*instance``. - - This field is a member of `oneof`_ ``_filter``. - include_all_scopes (bool): - Indicates whether every visible scope for - each scope type (zone, region, global) should be - included in the response. For new resource types - added after this field, the flag has no effect - as new resource types will always include every - visible scope for each scope type in response. - For resource types which predate this field, if - this flag is omitted or false, only scopes of - the scope types where the resource type is - expected to be found will be included. - - This field is a member of `oneof`_ ``_include_all_scopes``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. 
You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - This field is a member of `oneof`_ ``_return_partial_success``. + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -5960,11 +6105,16 @@ class AggregatedListMachineTypesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNetworkAttachmentsRequest(proto.Message): - r"""A request message for NetworkAttachments.AggregatedList. See - the method description for details. +class AggregatedListMachineTypesRequest(proto.Message): + r"""A request message for MachineTypes.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -5975,16 +6125,15 @@ class AggregatedListNetworkAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6011,7 +6160,8 @@ class AggregatedListNetworkAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. 
+ ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6063,6 +6213,9 @@ class AggregatedListNetworkAttachmentsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6099,12 +6252,16 @@ class AggregatedListNetworkAttachmentsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): - r"""A request message for - NetworkEdgeSecurityServices.AggregatedList. See the method - description for details. +class AggregatedListNetworkAttachmentsRequest(proto.Message): + r"""A request message for NetworkAttachments.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6115,16 +6272,15 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6151,7 +6307,8 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6196,13 +6353,16 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6239,11 +6399,17 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for NetworkEndpointGroups.AggregatedList. - See the method description for details. +class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): + r"""A request message for + NetworkEdgeSecurityServices.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6254,16 +6420,15 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6290,7 +6455,8 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6335,13 +6501,16 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
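A sketch of the ``order_by`` and paging behavior described in the ``MachineTypes.AggregatedList`` docstring restored above: ``order_by="creationTimestamp desc"`` and ``max_results`` are request fields defined in this file, while the ``pages`` property on the returned pager and its automatic ``nextPageToken`` handling are assumptions about the generated client; ``my-project`` is a placeholder::

    from google.cloud import compute_v1

    client = compute_v1.MachineTypesClient()
    request = compute_v1.AggregatedListMachineTypesRequest(
        project="my-project",               # placeholder project ID
        order_by="creationTimestamp desc",  # or sort by name (the default)
        max_results=50,                     # per-page size
    )
    pager = client.aggregated_list(request=request)
    # Each page is one API response; the pager follows page tokens itself,
    # so page_token normally does not need to be set by hand.
    for page in pager.pages:
        for scope, scoped_list in page.items.items():
            print(scope, len(scoped_list.machine_types))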
""" filter: str = proto.Field( @@ -6378,11 +6547,16 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.AggregatedList. See the - method description for details. +class AggregatedListNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6393,16 +6567,15 @@ class AggregatedListNodeGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6429,7 +6602,8 @@ class AggregatedListNodeGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6481,6 +6655,9 @@ class AggregatedListNodeGroupsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6517,10 +6694,15 @@ class AggregatedListNodeGroupsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNodeTemplatesRequest(proto.Message): - r"""A request message for NodeTemplates.AggregatedList. See the +class AggregatedListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.AggregatedList. See the method description for details. 
@@ -6532,16 +6714,15 @@ class AggregatedListNodeTemplatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6568,7 +6749,8 @@ class AggregatedListNodeTemplatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6620,6 +6802,9 @@ class AggregatedListNodeTemplatesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6656,10 +6841,15 @@ class AggregatedListNodeTemplatesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListNodeTypesRequest(proto.Message): - r"""A request message for NodeTypes.AggregatedList. See the +class AggregatedListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.AggregatedList. See the method description for details. @@ -6671,16 +6861,15 @@ class AggregatedListNodeTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. 
The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6707,7 +6896,8 @@ class AggregatedListNodeTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6759,6 +6949,9 @@ class AggregatedListNodeTypesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6795,11 +6988,16 @@ class AggregatedListNodeTypesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListPacketMirroringsRequest(proto.Message): - r"""A request message for PacketMirrorings.AggregatedList. See - the method description for details. +class AggregatedListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6810,16 +7008,15 @@ class AggregatedListPacketMirroringsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
+ For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6846,7 +7043,8 @@ class AggregatedListPacketMirroringsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -6898,6 +7096,9 @@ class AggregatedListPacketMirroringsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -6934,11 +7135,16 @@ class AggregatedListPacketMirroringsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for PublicDelegatedPrefixes.AggregatedList. - See the method description for details. +class AggregatedListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6949,16 +7155,15 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -6985,7 +7190,8 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. 
For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7030,13 +7236,16 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7073,11 +7282,16 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListRegionCommitmentsRequest(proto.Message): - r"""A request message for RegionCommitments.AggregatedList. See - the method description for details. +class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7088,16 +7302,15 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7124,7 +7337,8 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
include_all_scopes (bool): @@ -7169,13 +7383,16 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7212,11 +7429,16 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListReservationsRequest(proto.Message): - r"""A request message for Reservations.AggregatedList. See the - method description for details. +class AggregatedListRegionCommitmentsRequest(proto.Message): + r"""A request message for RegionCommitments.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7227,16 +7449,15 @@ class AggregatedListReservationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7263,7 +7484,8 @@ class AggregatedListReservationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7315,6 +7537,9 @@ class AggregatedListReservationsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
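A sketch of the ``return_partial_success`` opt-in documented above, which defaults to false and allows partial results when some scopes fail. The new optional ``service_project_number`` field added by these hunks carries no description in this revision, so it is deliberately left unset here. ``ReservationsClient.aggregated_list`` and the scoped-list field names are assumed generated surfaces; ``my-project`` is a placeholder::

    from google.cloud import compute_v1

    client = compute_v1.ReservationsClient()
    request = compute_v1.AggregatedListReservationsRequest(
        project="my-project",          # placeholder project ID
        # Opt in to partial results if some scopes fail (defaults to false).
        return_partial_success=True,
    )
    for zone, scoped_list in client.aggregated_list(request=request):
        if scoped_list.reservations:
            print(zone, [r.name for r in scoped_list.reservations])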
""" filter: str = proto.Field( @@ -7351,11 +7576,16 @@ class AggregatedListReservationsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListResourcePoliciesRequest(proto.Message): - r"""A request message for ResourcePolicies.AggregatedList. See - the method description for details. +class AggregatedListReservationsRequest(proto.Message): + r"""A request message for Reservations.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7366,16 +7596,15 @@ class AggregatedListResourcePoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7402,7 +7631,8 @@ class AggregatedListResourcePoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7454,6 +7684,9 @@ class AggregatedListResourcePoliciesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7490,11 +7723,16 @@ class AggregatedListResourcePoliciesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListRoutersRequest(proto.Message): - r"""A request message for Routers.AggregatedList. See the method - description for details. +class AggregatedListResourcePoliciesRequest(proto.Message): + r"""A request message for ResourcePolicies.AggregatedList. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7505,16 +7743,15 @@ class AggregatedListRoutersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7541,7 +7778,8 @@ class AggregatedListRoutersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7593,6 +7831,9 @@ class AggregatedListRoutersRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7629,11 +7870,16 @@ class AggregatedListRoutersRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListSecurityPoliciesRequest(proto.Message): - r"""A request message for SecurityPolicies.AggregatedList. See - the method description for details. +class AggregatedListRoutersRequest(proto.Message): + r"""A request message for Routers.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7644,16 +7890,15 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7680,7 +7925,8 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7725,13 +7971,16 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7768,10 +8017,15 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListServiceAttachmentsRequest(proto.Message): - r"""A request message for ServiceAttachments.AggregatedList. See +class AggregatedListSecurityPoliciesRequest(proto.Message): + r"""A request message for SecurityPolicies.AggregatedList. See the method description for details. @@ -7783,16 +8037,15 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. 
These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7819,7 +8072,8 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -7871,6 +8125,9 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -7907,11 +8164,16 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListSslCertificatesRequest(proto.Message): - r"""A request message for SslCertificates.AggregatedList. See the - method description for details. +class AggregatedListServiceAttachmentsRequest(proto.Message): + r"""A request message for ServiceAttachments.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7922,16 +8184,15 @@ class AggregatedListSslCertificatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -7958,7 +8219,8 @@ class AggregatedListSslCertificatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8010,6 +8272,9 @@ class AggregatedListSslCertificatesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -8046,10 +8311,15 @@ class AggregatedListSslCertificatesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListSslPoliciesRequest(proto.Message): - r"""A request message for SslPolicies.AggregatedList. See the +class AggregatedListSslCertificatesRequest(proto.Message): + r"""A request message for SslCertificates.AggregatedList. See the method description for details. @@ -8061,16 +8331,15 @@ class AggregatedListSslPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8097,7 +8366,8 @@ class AggregatedListSslPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8149,6 +8419,9 @@ class AggregatedListSslPoliciesRequest(proto.Message): default value is false. 
This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -8185,10 +8458,15 @@ class AggregatedListSslPoliciesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.AggregatedList. See the +class AggregatedListSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.AggregatedList. See the method description for details. @@ -8200,16 +8478,15 @@ class AggregatedListSubnetworksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8236,7 +8513,8 @@ class AggregatedListSubnetworksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8281,13 +8559,16 @@ class AggregatedListSubnetworksRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
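A minimal sketch of the new optional ``service_project_number`` field that this revision adds to the aggregated-list requests; the field carries no description in the proto, so the numeric value below is purely a placeholder, and the pagination settings simply exercise the ``maxResults``/``nextPageToken`` behavior described above:

    from google.cloud import compute_v1

    client = compute_v1.SubnetworksClient()
    request = compute_v1.AggregatedListSubnetworksRequest(
        project="my-host-project",            # placeholder project ID
        service_project_number=123456789012,  # new optional INT64 field
        max_results=100,                      # 0..500, default 500
        return_partial_success=True,
    )

    # The pager follows nextPageToken automatically; iterating ``pages``
    # makes the page boundaries visible.
    pager = client.aggregated_list(request=request)
    for page in pager.pages:
        print("next_page_token:", page.next_page_token)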
""" filter: str = proto.Field( @@ -8324,11 +8605,16 @@ class AggregatedListSubnetworksRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetHttpProxiesRequest(proto.Message): - r"""A request message for TargetHttpProxies.AggregatedList. See - the method description for details. +class AggregatedListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8339,16 +8625,15 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8375,7 +8660,8 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8420,13 +8706,16 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. 
""" filter: str = proto.Field( @@ -8463,10 +8752,15 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetHttpsProxiesRequest(proto.Message): - r"""A request message for TargetHttpsProxies.AggregatedList. See +class AggregatedListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.AggregatedList. See the method description for details. @@ -8478,16 +8772,15 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8514,7 +8807,8 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8566,6 +8860,9 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -8602,11 +8899,16 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetInstancesRequest(proto.Message): - r"""A request message for TargetInstances.AggregatedList. See the - method description for details. +class AggregatedListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.AggregatedList. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8617,16 +8919,15 @@ class AggregatedListTargetInstancesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8653,7 +8954,8 @@ class AggregatedListTargetInstancesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8698,13 +9000,16 @@ class AggregatedListTargetInstancesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -8741,10 +9046,15 @@ class AggregatedListTargetInstancesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetPoolsRequest(proto.Message): - r"""A request message for TargetPools.AggregatedList. See the +class AggregatedListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.AggregatedList. See the method description for details. @@ -8756,16 +9066,15 @@ class AggregatedListTargetPoolsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. 
If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8792,7 +9101,8 @@ class AggregatedListTargetPoolsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8844,6 +9154,9 @@ class AggregatedListTargetPoolsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -8880,11 +9193,16 @@ class AggregatedListTargetPoolsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetTcpProxiesRequest(proto.Message): - r"""A request message for TargetTcpProxies.AggregatedList. See - the method description for details. +class AggregatedListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8895,16 +9213,15 @@ class AggregatedListTargetTcpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. 
These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -8931,7 +9248,8 @@ class AggregatedListTargetTcpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -8976,13 +9294,16 @@ class AggregatedListTargetTcpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -9019,10 +9340,15 @@ class AggregatedListTargetTcpProxiesRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListTargetVpnGatewaysRequest(proto.Message): - r"""A request message for TargetVpnGateways.AggregatedList. See +class AggregatedListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.AggregatedList. See the method description for details. @@ -9034,16 +9360,15 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
+ For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -9070,7 +9395,8 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -9115,13 +9441,16 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -9158,11 +9487,16 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListUrlMapsRequest(proto.Message): - r"""A request message for UrlMaps.AggregatedList. See the method - description for details. +class AggregatedListTargetVpnGatewaysRequest(proto.Message): + r"""A request message for TargetVpnGateways.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9173,16 +9507,15 @@ class AggregatedListUrlMapsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -9209,7 +9542,8 @@ class AggregatedListUrlMapsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -9254,13 +9588,16 @@ class AggregatedListUrlMapsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -9297,11 +9634,16 @@ class AggregatedListUrlMapsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) -class AggregatedListVpnGatewaysRequest(proto.Message): - r"""A request message for VpnGateways.AggregatedList. See the - method description for details. +class AggregatedListUrlMapsRequest(proto.Message): + r"""A request message for UrlMaps.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9312,16 +9654,15 @@ class AggregatedListVpnGatewaysRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -9348,7 +9689,155 @@ class AggregatedListVpnGatewaysRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. 
+ ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + +class AggregatedListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -9400,6 +9889,9 @@ class AggregatedListVpnGatewaysRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -9436,6 +9928,11 @@ class AggregatedListVpnGatewaysRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AggregatedListVpnTunnelsRequest(proto.Message): @@ -9451,16 +9948,15 @@ class AggregatedListVpnTunnelsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -9487,7 +9983,8 @@ class AggregatedListVpnTunnelsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): @@ -9539,6 +10036,9 @@ class AggregatedListVpnTunnelsRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + + This field is a member of `oneof`_ ``_service_project_number``. """ filter: str = proto.Field( @@ -9575,6 +10075,11 @@ class AggregatedListVpnTunnelsRequest(proto.Message): number=517198390, optional=True, ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) class AliasIpRange(proto.Message): @@ -9879,6 +10384,110 @@ class Allowed(proto.Message): ) +class AnnouncePublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Announce. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix (str): + The name of the public advertised prefix. It + should comply with RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
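A hedged sketch of issuing the new Announce request defined above; the ``announce`` method name on ``PublicAdvertisedPrefixesClient`` is assumed from the RPC name rather than confirmed by this hunk, and the prefix name is a placeholder. The UUID illustrates the ``request_id`` retry semantics described in the docstring:

    import uuid

    from google.cloud import compute_v1

    client = compute_v1.PublicAdvertisedPrefixesClient()

    request = compute_v1.AnnouncePublicAdvertisedPrefixeRequest(
        project="my-project",
        public_advertised_prefix="my-advertised-prefix",  # RFC 1035 name
        # A client-generated UUID lets the server detect a retried request
        # and ignore the duplicate instead of announcing twice.
        request_id=str(uuid.uuid4()),
    )

    operation = client.announce(request=request)  # assumed method name
    operation.result()  # block until the operation completes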
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix: str = proto.Field( + proto.STRING, + number=101874590, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AnnouncePublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Announce. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + The name of the public delegated prefix. It + should comply with RFC1035. + region (str): + The name of the region where the public + delegated prefix is located. It should comply + with RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class ApplyUpdatesToInstancesInstanceGroupManagerRequest(proto.Message): r"""A request message for InstanceGroupManagers.ApplyUpdatesToInstances. See the method @@ -10148,6 +10757,71 @@ class AttachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): ) +class AttachNetworkEndpointsRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + RegionNetworkEndpointGroups.AttachNetworkEndpoints. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group where + you are attaching network endpoints to. It + should comply with RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region where you want to + create the network endpoint group. It should + comply with RFC1035. + region_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_network_endpoint_groups_attach_endpoints_request_resource: "RegionNetworkEndpointGroupsAttachEndpointsRequest" = proto.Field( + proto.MESSAGE, + number=334986492, + message="RegionNetworkEndpointGroupsAttachEndpointsRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class AttachedDisk(proto.Message): r"""An instance-attached disk resource. @@ -10591,8 +11265,7 @@ class AttachedDiskInitializeParams(proto.Message): with the instance. Specify the URLs of the zones where the disk should be replicated to. You must provide exactly two replica zones, and one zone - must be the same as the instance zone. You can't - use this option with boot disks. + must be the same as the instance zone. resource_manager_tags (MutableMapping[str, str]): Resource manager tags to be bound to the disk. Tag keys and values have the same definition as resource manager tags. @@ -12056,8 +12729,8 @@ class AutoscalingPolicyScalingSchedule(proto.Message): time_zone (str): The time zone to use when interpreting the schedule. The value of this field must be a time zone name from the tz - database: http://en.wikipedia.org/wiki/Tz_database. This - field is assigned a default value of “UTC” if left empty. + database: https://en.wikipedia.org/wiki/Tz_database. This + field is assigned a default value of "UTC" if left empty. This field is a member of `oneof`_ ``_time_zone``. """ @@ -13088,9 +13761,10 @@ class BackendService(proto.Message): service_protocol set to HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the load_balancing_scheme set to - INTERNAL_SELF_MANAGED. If sessionAffinity is not NONE, and - this field is not set to MAGLEV or RING_HASH, session - affinity settings will not take effect. Only ROUND_ROBIN and + INTERNAL_SELF_MANAGED, INTERNAL_MANAGED, or + EXTERNAL_MANAGED. If sessionAffinity is not NONE, and this + field is not set to MAGLEV or RING_HASH, session affinity + settings will not take effect. Only ROUND_ROBIN and RING_HASH are supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. Check the @@ -13260,6 +13934,8 @@ class BackendService(proto.Message): set to true. Instead, use maxStreamDuration. This field is a member of `oneof`_ ``_timeout_sec``. + used_by (MutableSequence[google.cloud.compute_v1.types.BackendServiceUsedBy]): + """ class CompressionMode(proto.Enum): @@ -13341,11 +14017,12 @@ class LocalityLbPolicy(proto.Enum): either: - A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the - load_balancing_scheme set to INTERNAL_SELF_MANAGED. If - sessionAffinity is not NONE, and this field is not set to MAGLEV or - RING_HASH, session affinity settings will not take effect. 
Only - ROUND_ROBIN and RING_HASH are supported when the backend service is - referenced by a URL map that is bound to target gRPC proxy that has + load_balancing_scheme set to INTERNAL_SELF_MANAGED, + INTERNAL_MANAGED, or EXTERNAL_MANAGED. If sessionAffinity is not + NONE, and this field is not set to MAGLEV or RING_HASH, session + affinity settings will not take effect. Only ROUND_ROBIN and + RING_HASH are supported when the backend service is referenced by a + URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. Values: @@ -13728,6 +14405,11 @@ class SessionAffinity(proto.Enum): number=79994995, optional=True, ) + used_by: MutableSequence["BackendServiceUsedBy"] = proto.RepeatedField( + proto.MESSAGE, + number=389320729, + message="BackendServiceUsedBy", + ) class BackendServiceAggregatedList(proto.Message): @@ -14404,8 +15086,7 @@ class BackendServiceIAP(proto.Message): enabled (bool): Whether the serving infrastructure will authenticate and authorize all incoming - requests. If true, the oauth2ClientId and - oauth2ClientSecret fields must be non-empty. + requests. This field is a member of `oneof`_ ``_enabled``. oauth2_client_id (str): @@ -14524,6 +15205,81 @@ def raw_page(self): ) +class BackendServiceListUsable(proto.Message): + r"""Contains a list of usable BackendService resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.BackendService]): + A list of BackendService resources. + kind (str): + [Output Only] Type of resource. Always + compute#usableBackendServiceList for lists of usable backend + services. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["BackendService"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="BackendService", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class BackendServiceLocalityLoadBalancingPolicyConfig(proto.Message): r"""Container for either a built-in LB policy supported by gRPC or Envoy or a custom one implemented by the end user. 
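As a usage sketch for the new usable backend services listing introduced above, the following assumes the regenerated BackendServicesClient exposes a list_usable method whose pager walks BackendServiceListUsable pages (consistent with the pager and transport additions in this change); method and parameter names are assumptions, not taken verbatim from this diff.

from google.cloud import compute_v1


def print_usable_backend_services(project: str) -> None:
    # Assumed flattened call: list_usable(project=...); the returned pager
    # follows next_page_token across BackendServiceListUsable pages for you.
    client = compute_v1.BackendServicesClient()
    for backend_service in client.list_usable(project=project):
        # Each item is a BackendService; used_by (when populated) carries
        # BackendServiceUsedBy entries whose `reference` points at the
        # resource that references this backend service.
        references = [entry.reference for entry in backend_service.used_by]
        print(backend_service.name, references)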
@@ -14794,6 +15550,24 @@ class BackendServiceReference(proto.Message): ) +class BackendServiceUsedBy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reference (str): + + This field is a member of `oneof`_ ``_reference``. + """ + + reference: str = proto.Field( + proto.STRING, + number=148586315, + optional=True, + ) + + class BackendServicesScopedList(proto.Message): r""" @@ -15702,6 +16476,11 @@ class BulkInsertInstanceResourcePerInstanceProperties(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + hostname (str): + Specifies the hostname of the instance. More details in: + https://cloud.google.com/compute/docs/instances/custom-hostname-vm#naming_convention + + This field is a member of `oneof`_ ``_hostname``. name (str): This field is only temporary. It will be removed. Do not use it. @@ -15709,6 +16488,11 @@ class BulkInsertInstanceResourcePerInstanceProperties(proto.Message): This field is a member of `oneof`_ ``_name``. """ + hostname: str = proto.Field( + proto.STRING, + number=237067315, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, @@ -15716,6 +16500,87 @@ class BulkInsertInstanceResourcePerInstanceProperties(proto.Message): ) +class BulkInsertOperationStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + created_vm_count (int): + [Output Only] Count of VMs successfully created so far. + + This field is a member of `oneof`_ ``_created_vm_count``. + deleted_vm_count (int): + [Output Only] Count of VMs that got deleted during rollback. + + This field is a member of `oneof`_ ``_deleted_vm_count``. + failed_to_create_vm_count (int): + [Output Only] Count of VMs that started creating but + encountered an error. + + This field is a member of `oneof`_ ``_failed_to_create_vm_count``. + status (str): + [Output Only] Creation status of BulkInsert operation - + information if the flow is rolling forward or rolling back. + Check the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + target_vm_count (int): + [Output Only] Count of VMs originally planned to be created. + + This field is a member of `oneof`_ ``_target_vm_count``. + """ + + class Status(proto.Enum): + r"""[Output Only] Creation status of BulkInsert operation - information + if the flow is rolling forward or rolling back. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Rolling forward - creating VMs. + DONE (2104194): + Done + ROLLING_BACK (259411649): + Rolling back - cleaning up after an error. + STATUS_UNSPECIFIED (42133066): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DONE = 2104194 + ROLLING_BACK = 259411649 + STATUS_UNSPECIFIED = 42133066 + + created_vm_count: int = proto.Field( + proto.INT32, + number=396924158, + optional=True, + ) + deleted_vm_count: int = proto.Field( + proto.INT32, + number=271756013, + optional=True, + ) + failed_to_create_vm_count: int = proto.Field( + proto.INT32, + number=58384104, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + target_vm_count: int = proto.Field( + proto.INT32, + number=532975733, + optional=True, + ) + + class BulkInsertRegionDiskRequest(proto.Message): r"""A request message for RegionDisks.BulkInsert. See the method description for details. @@ -16275,7 +17140,8 @@ class Commitment(proto.Message): This field is a member of `oneof`_ ``_region``. reservations (MutableSequence[google.cloud.compute_v1.types.Reservation]): - List of reservations in this commitment. + List of create-on-create reseravtions for + this commitment. resources (MutableSequence[google.cloud.compute_v1.types.ResourceCommitment]): A list of commitment amounts for particular resources. Note that VCPU and MEMORY resource @@ -16404,12 +17270,18 @@ class Type(proto.Enum): set. ACCELERATOR_OPTIMIZED (280848403): No description available. + ACCELERATOR_OPTIMIZED_A3 (158574526): + No description available. COMPUTE_OPTIMIZED (158349023): No description available. COMPUTE_OPTIMIZED_C2D (383246453): No description available. COMPUTE_OPTIMIZED_C3 (428004784): No description available. + COMPUTE_OPTIMIZED_C3D (383246484): + No description available. + COMPUTE_OPTIMIZED_H3 (428004939): + No description available. GENERAL_PURPOSE (299793543): No description available. GENERAL_PURPOSE_E2 (301911877): @@ -16431,9 +17303,12 @@ class Type(proto.Enum): """ UNDEFINED_TYPE = 0 ACCELERATOR_OPTIMIZED = 280848403 + ACCELERATOR_OPTIMIZED_A3 = 158574526 COMPUTE_OPTIMIZED = 158349023 COMPUTE_OPTIMIZED_C2D = 383246453 COMPUTE_OPTIMIZED_C3 = 428004784 + COMPUTE_OPTIMIZED_C3D = 383246484 + COMPUTE_OPTIMIZED_H3 = 428004939 GENERAL_PURPOSE = 299793543 GENERAL_PURPOSE_E2 = 301911877 GENERAL_PURPOSE_N2 = 301912156 @@ -22091,6 +22966,69 @@ class DetachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): ) +class DetachNetworkEndpointsRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + RegionNetworkEndpointGroups.DetachNetworkEndpoints. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group you + are detaching network endpoints from. It should + comply with RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region where the network + endpoint group is located. It should comply with + RFC1035. + region_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. For example, consider a situation where you + make an initial request and the request times out. 
If you + make the request again with the same request ID, the server + can check if original operation with the same request ID was + received, and if so, will ignore the second request. This + prevents clients from accidentally creating duplicate + commitments. The request ID must be a valid UUID with the + exception that zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_network_endpoint_groups_detach_endpoints_request_resource: "RegionNetworkEndpointGroupsDetachEndpointsRequest" = proto.Field( + proto.MESSAGE, + number=313193198, + message="RegionNetworkEndpointGroupsDetachEndpointsRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class DisableXpnHostProjectRequest(proto.Message): r"""A request message for Projects.DisableXpnHost. See the method description for details. @@ -22374,8 +23312,7 @@ class Disk(proto.Message): create an empty persistent disk. If you specify this field along with a source, the value of sizeGb must not be less than the size of the - source. Acceptable values are 1 to 65536, - inclusive. + source. Acceptable values are greater than 0. This field is a member of `oneof`_ ``_size_gb``. source_consistency_group_policy (str): @@ -26065,15 +27002,21 @@ class ForwardingRule(proto.Message): This field is a member of `oneof`_ ``_I_p_protocol``. all_ports (bool): - This field can only be used: - If IPProtocol is one of TCP, - UDP, or SCTP. - By internal TCP/UDP load balancers, backend - service-based network load balancers, and internal and - external protocol forwarding. Set this field to true to - allow packets addressed to any port or packets lacking + The ports, portRange, and allPorts fields are mutually + exclusive. Only packets addressed to ports in the specified + range will be forwarded to the backends configured with this + forwarding rule. The allPorts field has the following + limitations: - It requires that the forwarding rule + IPProtocol be TCP, UDP, SCTP, or L3_DEFAULT. - It's + applicable only to the following products: internal + passthrough Network Load Balancers, backend service-based + external passthrough Network Load Balancers, and internal + and external protocol forwarding. - Set this field to true + to allow packets addressed to any port or packets lacking destination port information (for example, UDP fragments after the first fragment) to be forwarded to the backends - configured with this forwarding rule. The ports, port_range, - and allPorts fields are mutually exclusive. + configured with this forwarding rule. The L3_DEFAULT + protocol requires allPorts be set to true. This field is a member of `oneof`_ ``_all_ports``. allow_global_access (bool): @@ -26224,17 +27167,17 @@ class ForwardingRule(proto.Message): This field is a member of `oneof`_ ``_name``. network (str): - This field is not used for external load - balancing. For Internal TCP/UDP Load Balancing, - this field identifies the network that the load - balanced IP should belong to for this Forwarding - Rule. If the subnetwork is specified, the - network of the subnetwork will be used. If - neither subnetwork nor this field is specified, - the default network will be used. 
For Private - Service Connect forwarding rules that forward - traffic to Google APIs, a network must be - provided. + This field is not used for global external + load balancing. For Internal TCP/UDP Load + Balancing, this field identifies the network + that the load balanced IP should belong to for + this Forwarding Rule. If the subnetwork is + specified, the network of the subnetwork will be + used. If neither subnetwork nor this field is + specified, the default network will be used. For + Private Service Connect forwarding rules that + forward traffic to Google APIs, a network must + be provided. This field is a member of `oneof`_ ``_network``. network_tier (str): @@ -26255,45 +27198,50 @@ class ForwardingRule(proto.Message): This is used in PSC consumer ForwardingRule to control whether it should try to auto-generate a DNS zone or not. Non-PSC - forwarding rules do not use this field. + forwarding rules do not use this field. Once + set, this field is not mutable. This field is a member of `oneof`_ ``_no_automate_dns_zone``. port_range (str): - This field can only be used: - If IPProtocol is one of TCP, - UDP, or SCTP. - By backend service-based network load - balancers, target pool-based network load balancers, - internal proxy load balancers, external proxy load - balancers, Traffic Director, external protocol forwarding, - and Classic VPN. Some products have restrictions on what - ports can be used. See port specifications for details. Only - packets addressed to ports in the specified range will be - forwarded to the backends configured with this forwarding - rule. The ports, port_range, and allPorts fields are - mutually exclusive. For external forwarding rules, two or - more forwarding rules cannot use the same [IPAddress, - IPProtocol] pair, and cannot have overlapping portRanges. - For internal forwarding rules within the same VPC network, - two or more forwarding rules cannot use the same [IPAddress, - IPProtocol] pair, and cannot have overlapping portRanges. - @pattern: \\d+(?:-\d+)? + The ports, portRange, and allPorts fields are mutually + exclusive. Only packets addressed to ports in the specified + range will be forwarded to the backends configured with this + forwarding rule. The portRange field has the following + limitations: - It requires that the forwarding rule + IPProtocol be TCP, UDP, or SCTP, and - It's applicable only + to the following products: external passthrough Network Load + Balancers, internal and external proxy Network Load + Balancers, internal and external Application Load Balancers, + external protocol forwarding, and Classic VPN. - Some + products have restrictions on what ports can be used. See + port specifications for details. For external forwarding + rules, two or more forwarding rules cannot use the same + [IPAddress, IPProtocol] pair, and cannot have overlapping + portRanges. For internal forwarding rules within the same + VPC network, two or more forwarding rules cannot use the + same [IPAddress, IPProtocol] pair, and cannot have + overlapping portRanges. @pattern: \\d+(?:-\d+)? This field is a member of `oneof`_ ``_port_range``. ports (MutableSequence[str]): - This field can only be used: - If IPProtocol is one of TCP, - UDP, or SCTP. - By internal TCP/UDP load balancers, backend - service-based network load balancers, and internal protocol - forwarding. You can specify a list of up to five ports by - number, separated by commas. The ports can be contiguous or - discontiguous. 
Only packets addressed to these ports will be - forwarded to the backends configured with this forwarding - rule. For external forwarding rules, two or more forwarding - rules cannot use the same [IPAddress, IPProtocol] pair, and - cannot share any values defined in ports. For internal + The ports, portRange, and allPorts fields are mutually + exclusive. Only packets addressed to ports in the specified + range will be forwarded to the backends configured with this + forwarding rule. The ports field has the following + limitations: - It requires that the forwarding rule + IPProtocol be TCP, UDP, or SCTP, and - It's applicable only + to the following products: internal passthrough Network Load + Balancers, backend service-based external passthrough + Network Load Balancers, and internal protocol forwarding. - + You can specify a list of up to five ports by number, + separated by commas. The ports can be contiguous or + discontiguous. For external forwarding rules, two or more + forwarding rules cannot use the same [IPAddress, IPProtocol] + pair if they share at least one port number. For internal forwarding rules within the same VPC network, two or more forwarding rules cannot use the same [IPAddress, IPProtocol] - pair, and cannot share any values defined in ports. The - ports, port_range, and allPorts fields are mutually - exclusive. @pattern: \\d+(?:-\d+)? + pair if they share at least one port number. @pattern: + \\d+(?:-\d+)? psc_connection_id (int): [Output Only] The PSC connection id of the PSC Forwarding Rule. @@ -26375,6 +27323,7 @@ class ForwardingRule(proto.Message): Controls. - all-apis - All supported Google APIs. - For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service + attachment. The target is not mutable once set as a service attachment. This field is a member of `oneof`_ ``_target``. @@ -27878,8 +28827,8 @@ class GetHealthTargetPoolRequest(proto.Message): ) -class GetIamPolicyBackendServiceRequest(proto.Message): - r"""A request message for BackendServices.GetIamPolicy. See the +class GetIamPolicyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.GetIamPolicy. See the method description for details. @@ -27911,77 +28860,110 @@ class GetIamPolicyBackendServiceRequest(proto.Message): ) -class GetIamPolicyDiskRequest(proto.Message): - r"""A request message for Disks.GetIamPolicy. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. - - This field is a member of `oneof`_ ``_options_requested_policy_version``. - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. - zone (str): - The name of the zone for this request. - """ - - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) - - -class GetIamPolicyFirewallPolicyRequest(proto.Message): - r"""A request message for FirewallPolicies.GetIamPolicy. See the +class GetIamPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.GetIamPolicy. See the method description for details. - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. - - This field is a member of `oneof`_ ``_options_requested_policy_version``. - resource (str): - Name or id of the resource for this request. - """ - - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class GetIamPolicyImageRequest(proto.Message): - r"""A request message for Images.GetIamPolicy. See the method - description for details. - - + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyDiskRequest(proto.Message): + r"""A request message for Disks.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyImageRequest(proto.Message): + r"""A request message for Images.GetIamPolicy. See the method + description for details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -28940,6 +29922,71 @@ class GetMachineTypeRequest(proto.Message): ) +class GetMacsecConfigInterconnectRequest(proto.Message): + r"""A request message for Interconnects.GetMacsecConfig. See the + method description for details. + + Attributes: + interconnect (str): + Name of the interconnect resource to query. 
+ project (str): + Project ID for this request. + """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetNatIpInfoRouterRequest(proto.Message): + r"""A request message for Routers.GetNatIpInfo. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + nat_name (str): + Name of the nat service to filter the NAT IP + information. If it is omitted, all nats for this + router will be returned. Name should conform to + RFC1035. + + This field is a member of `oneof`_ ``_nat_name``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + router (str): + Name of the Router resource to query for Nat + IP information. The name should conform to + RFC1035. + """ + + nat_name: str = proto.Field( + proto.STRING, + number=425596649, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + + class GetNatMappingInfoRoutersRequest(proto.Message): r"""A request message for Routers.GetNatMappingInfo. See the method description for details. @@ -28953,16 +30000,15 @@ class GetNatMappingInfoRoutersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -28989,7 +30035,8 @@ class GetNatMappingInfoRoutersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
max_results (int): @@ -30225,6 +31272,47 @@ class GetRuleRegionNetworkFirewallPolicyRequest(proto.Message): ) +class GetRuleRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.GetRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to get from the + security policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + security_policy (str): + Name of the security policy to which the + queried rule belongs. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + class GetRuleSecurityPolicyRequest(proto.Message): r"""A request message for SecurityPolicies.GetRule. See the method description for details. @@ -30448,6 +31536,21 @@ class GetSnapshotRequest(proto.Message): ) +class GetSnapshotSettingRequest(proto.Message): + r"""A request message for SnapshotSettingsService.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + class GetSslCertificateRequest(proto.Message): r"""A request message for SslCertificates.Get. See the method description for details. @@ -30842,16 +31945,15 @@ class GetXpnResourcesProjectsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -30878,7 +31980,8 @@ class GetXpnResourcesProjectsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. 
You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -31356,6 +32459,8 @@ class Type(proto.Enum): No description available. SEV_LIVE_MIGRATABLE (392039820): No description available. + SEV_LIVE_MIGRATABLE_V2 (168551983): + No description available. SEV_SNP_CAPABLE (426919): No description available. UEFI_COMPATIBLE (195865408): @@ -31372,6 +32477,7 @@ class Type(proto.Enum): SECURE_BOOT = 376811194 SEV_CAPABLE = 87083793 SEV_LIVE_MIGRATABLE = 392039820 + SEV_LIVE_MIGRATABLE_V2 = 168551983 SEV_SNP_CAPABLE = 426919 UEFI_COMPATIBLE = 195865408 VIRTIO_SCSI_MULTIQUEUE = 201597069 @@ -31923,23 +33029,26 @@ class ProxyHeader(proto.Enum): class HealthCheck(proto.Message): - r"""Represents a Health Check resource. Google Compute Engine has two - Health Check resources: \* - `Global `__ \* - `Regional `__ - Internal HTTP(S) load balancers must use regional health checks - (``compute.v1.regionHealthChecks``). Traffic Director must use - global health checks (``compute.v1.healthChecks``). Internal TCP/UDP - load balancers can use either regional or global health checks - (``compute.v1.regionHealthChecks`` or ``compute.v1.healthChecks``). - External HTTP(S), TCP proxy, and SSL proxy load balancers as well as - managed instance group auto-healing must use global health checks - (``compute.v1.healthChecks``). Backend service-based network load - balancers must use regional health checks - (``compute.v1.regionHealthChecks``). Target pool-based network load - balancers must use legacy HTTP health checks - (``compute.v1.httpHealthChecks``). For more information, see Health - checks overview. + r"""Represents a health check resource. Google Compute Engine has two + health check resources: \* + `Regional `__ \* + `Global `__ These + health check resources can be used for load balancing and for + autohealing VMs in a managed instance group (MIG). **Load + balancing** The following load balancer can use either regional or + global health check: \* Internal TCP/UDP load balancer The following + load balancers require regional health check: \* Internal HTTP(S) + load balancer \* Backend service-based network load balancer Traffic + Director and the following load balancers require global health + check: \* External HTTP(S) load balancer \* TCP proxy load balancer + \* SSL proxy load balancer The following load balancer require + `legacy HTTP health + checks `__: \* + Target pool-based network load balancer **Autohealing in MIGs** The + health checks that you use for autohealing VMs in a MIG can be + either regional or global. For more information, see Set up an + application health check and autohealing. For more information, see + Health checks overview. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -33668,9 +34777,9 @@ class HttpRouteAction(proto.Message): by clients that are configured with a fault_injection_policy if: 1. The traffic is generated by fault injection AND 2. The fault injection is not a delay fault injection. Fault - injection is not supported with the global external HTTP(S) - load balancer (classic). To see which load balancers support - fault injection, see Load balancing: Routing and traffic + injection is not supported with the classic Application Load + Balancer . To see which load balancers support fault + injection, see Load balancing: Routing and traffic management features. 
This field is a member of `oneof`_ ``_fault_injection_policy``. @@ -33721,9 +34830,9 @@ class HttpRouteAction(proto.Message): The spec to modify the URL of the request, before forwarding the request to the matched service. urlRewrite is the only action supported - in UrlMaps for external HTTP(S) load balancers. - Not supported when the URL map is bound to a - target gRPC proxy that has the + in UrlMaps for classic Application Load + Balancers. Not supported when the URL map is + bound to a target gRPC proxy that has the validateForProxyless field set to true. This field is a member of `oneof`_ ``_url_rewrite``. @@ -33861,9 +34970,9 @@ class HttpRouteRule(proto.Message): cannot contain any weightedBackendServices. Only one of urlRedirect, service or routeAction.weightedBackendService must be set. - URL maps for Classic external HTTP(S) load - balancers only support the urlRewrite action - within a route rule's routeAction. + URL maps for classic Application Load Balancers + only support the urlRewrite action within a + route rule's routeAction. This field is a member of `oneof`_ ``_route_action``. service (str): @@ -42192,9 +43301,17 @@ class InstanceReference(proto.Message): class InstanceTemplate(proto.Message): - r"""Represents an Instance Template resource. You can use - instance templates to create VM instances and managed instance - groups. For more information, read Instance Templates. + r"""Represents an Instance Template resource. Google Compute Engine has + two Instance Template resources: \* + `Global `__ \* + `Regional `__ + You can reuse a global instance template in different regions + whereas you can use a regional instance template in a specified + region only. If you want to reduce cross-region dependency or + achieve data residency, use a regional instance template. To create + VMs, managed instance groups, and reservations, you can use either + global or regional instance templates. For more information, read + Instance Templates. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -42595,6 +43712,25 @@ class InstancesAddResourcePoliciesRequest(proto.Message): ) +class InstancesBulkInsertOperationMetadata(proto.Message): + r""" + + Attributes: + per_location_status (MutableMapping[str, google.cloud.compute_v1.types.BulkInsertOperationStatus]): + Status information per location (location + name is key). Example key: zones/us-central1-a + """ + + per_location_status: MutableMapping[ + str, "BulkInsertOperationStatus" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=167851162, + message="BulkInsertOperationStatus", + ) + + class InstancesGetEffectiveFirewallsResponse(proto.Message): r""" @@ -42859,6 +43995,38 @@ class InstancesSetNameRequest(proto.Message): ) +class InstancesSetSecurityPolicyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_interfaces (MutableSequence[str]): + The network interfaces that the security + policy will be applied to. Network interfaces + use the nicN naming format. You can only set a + security policy for network interfaces with an + access config. + security_policy (str): + A full or partial URL to a security policy to + add to this instance. If this field is set to an + empty string it will remove the associated + security policy. + + This field is a member of `oneof`_ ``_security_policy``. 
+ """ + + network_interfaces: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=52735243, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) + + class InstancesSetServiceAccountRequest(proto.Message): r""" @@ -42957,6 +44125,15 @@ class Interconnect(proto.Message): over it. By default, the status is set to true. This field is a member of `oneof`_ ``_admin_enabled``. + available_features (MutableSequence[str]): + [Output only] List of features available for this + Interconnect connection, which can take one of the following + values: - MACSEC If present then the Interconnect connection + is provisioned on MACsec capable hardware ports. If not + present then the Interconnect connection is provisioned on + non-MACsec capable ports and MACsec isn't supported and + enabling MACsec fails. Check the AvailableFeatures enum for + the list of possible values. circuit_infos (MutableSequence[google.cloud.compute_v1.types.InterconnectCircuitInfo]): [Output Only] A list of CircuitInfo objects, that describe the individual circuits in this LAG. @@ -43048,6 +44225,19 @@ class Interconnect(proto.Message): provisioned. This field is a member of `oneof`_ ``_location``. + macsec (google.cloud.compute_v1.types.InterconnectMacsec): + Configuration that enables Media Access + Control security (MACsec) on the Cloud + Interconnect connection between Google and your + on-premises router. + + This field is a member of `oneof`_ ``_macsec``. + macsec_enabled (bool): + Enable or disable MACsec on this Interconnect + connection. MACsec enablement fails if the + MACsec object is not specified. + + This field is a member of `oneof`_ ``_macsec_enabled``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -43103,6 +44293,18 @@ class Interconnect(proto.Message): interconnect is connected to. This field is a member of `oneof`_ ``_remote_location``. + requested_features (MutableSequence[str]): + Optional. List of features requested for this + Interconnect connection, which can take one of + the following values: - MACSEC If specified then + the connection is created on MACsec capable + hardware ports. If not specified, the default + value is false, which allocates non-MACsec + capable ports first if available. This parameter + can be provided only with Interconnect INSERT. + It isn't valid for Interconnect PATCH. Check the + RequestedFeatures enum for the list of possible + values. requested_link_count (int): Target number of physical links in the link bundle, as requested by the customer. @@ -43131,6 +44333,17 @@ class Interconnect(proto.Message): This field is a member of `oneof`_ ``_state``. """ + class AvailableFeatures(proto.Enum): + r"""Additional supported values which may be not listed in the enum + directly due to technical reasons: IF_MACSEC + + Values: + UNDEFINED_AVAILABLE_FEATURES (0): + A value indicating that the enum field is not + set. 
+ """ + UNDEFINED_AVAILABLE_FEATURES = 0 + class InterconnectType(proto.Enum): r"""Type of interconnect, which can take one of the following values: - PARTNER: A partner-managed interconnection shared between customers @@ -43205,6 +44418,17 @@ class OperationalStatus(proto.Enum): OS_ACTIVE = 55721409 OS_UNPROVISIONED = 239771840 + class RequestedFeatures(proto.Enum): + r"""Additional supported values which may be not listed in the enum + directly due to technical reasons: IF_MACSEC + + Values: + UNDEFINED_REQUESTED_FEATURES (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_REQUESTED_FEATURES = 0 + class State(proto.Enum): r"""[Output Only] The current state of Interconnect functionality, which can take one of the following values: - ACTIVE: The Interconnect is @@ -43237,6 +44461,10 @@ class State(proto.Enum): number=445675089, optional=True, ) + available_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=496344307, + ) circuit_infos: MutableSequence["InterconnectCircuitInfo"] = proto.RepeatedField( proto.MESSAGE, number=164839855, @@ -43313,6 +44541,17 @@ class State(proto.Enum): number=290430901, optional=True, ) + macsec: "InterconnectMacsec" = proto.Field( + proto.MESSAGE, + number=528867490, + optional=True, + message="InterconnectMacsec", + ) + macsec_enabled: bool = proto.Field( + proto.BOOL, + number=194203812, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, @@ -43343,6 +44582,10 @@ class State(proto.Enum): number=324388750, optional=True, ) + requested_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=461240814, + ) requested_link_count: int = proto.Field( proto.INT32, number=45051387, @@ -43586,7 +44829,7 @@ class InterconnectAttachment(proto.Message): pairing_key (str): [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The opaque - identifier of an PARTNER attachment used to initiate + identifier of a PARTNER attachment used to initiate provisioning with a selected partner. Of the form "XXXXX/region/domain". @@ -43600,9 +44843,9 @@ class InterconnectAttachment(proto.Message): This field is a member of `oneof`_ ``_partner_asn``. partner_metadata (google.cloud.compute_v1.types.InterconnectAttachmentPartnerMetadata): Informational metadata about Partner attachments from - Partners to display to customers. Output only for for - PARTNER type, mutable for PARTNER_PROVIDER, not available - for DEDICATED. + Partners to display to customers. Output only for PARTNER + type, mutable for PARTNER_PROVIDER, not available for + DEDICATED. This field is a member of `oneof`_ ``_partner_metadata``. private_interconnect_info (google.cloud.compute_v1.types.InterconnectAttachmentPrivateInfo): @@ -44876,6 +46119,11 @@ class InterconnectDiagnosticsLinkStatus(proto.Message): lacp_status (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkLACPStatus): This field is a member of `oneof`_ ``_lacp_status``. + macsec (google.cloud.compute_v1.types.InterconnectDiagnosticsMacsecStatus): + Describes the status of MACsec encryption on + this link. + + This field is a member of `oneof`_ ``_macsec``. operational_status (str): The operational status of the link. 
Check the OperationalStatus enum for the list of @@ -44937,6 +46185,12 @@ class OperationalStatus(proto.Enum): optional=True, message="InterconnectDiagnosticsLinkLACPStatus", ) + macsec: "InterconnectDiagnosticsMacsecStatus" = proto.Field( + proto.MESSAGE, + number=528867490, + optional=True, + message="InterconnectDiagnosticsMacsecStatus", + ) operational_status: str = proto.Field( proto.STRING, number=201070847, @@ -44956,6 +46210,37 @@ class OperationalStatus(proto.Enum): ) +class InterconnectDiagnosticsMacsecStatus(proto.Message): + r"""Describes the status of MACsec encryption on the link. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ckn (str): + Indicates the Connectivity Association Key + Name (CKN) currently being used if MACsec is + operational. + + This field is a member of `oneof`_ ``_ckn``. + operational (bool): + Indicates whether or not MACsec is + operational on this link. + + This field is a member of `oneof`_ ``_operational``. + """ + + ckn: str = proto.Field( + proto.STRING, + number=98566, + optional=True, + ) + operational: bool = proto.Field( + proto.BOOL, + number=129704914, + optional=True, + ) + + class InterconnectList(proto.Message): r"""Response to the list request, and contains a list of interconnects. @@ -45055,6 +46340,17 @@ class InterconnectLocation(proto.Message): than one availability zone. Example: "zone1" or "zone2". This field is a member of `oneof`_ ``_availability_zone``. + available_features (MutableSequence[str]): + [Output only] List of features available at this + InterconnectLocation, which can take one of the following + values: - MACSEC Check the AvailableFeatures enum for the + list of possible values. + available_link_types (MutableSequence[str]): + [Output only] List of link types available at this + InterconnectLocation, which can take one of the following + values: - LINK_TYPE_ETHERNET_10G_LR - + LINK_TYPE_ETHERNET_100G_LR Check the AvailableLinkTypes enum + for the list of possible values. city (str): [Output Only] Metropolitan area designator that indicates which city an interconnect is located. For example: @@ -45130,6 +46426,35 @@ class InterconnectLocation(proto.Message): This field is a member of `oneof`_ ``_supports_pzs``. """ + class AvailableFeatures(proto.Enum): + r""" + + Values: + UNDEFINED_AVAILABLE_FEATURES (0): + A value indicating that the enum field is not + set. + IF_MACSEC (396279300): + Media Access Control security (MACsec) + """ + UNDEFINED_AVAILABLE_FEATURES = 0 + IF_MACSEC = 396279300 + + class AvailableLinkTypes(proto.Enum): + r""" + + Values: + UNDEFINED_AVAILABLE_LINK_TYPES (0): + A value indicating that the enum field is not + set. + LINK_TYPE_ETHERNET_100G_LR (337672551): + 100G Ethernet, LR Optics. + LINK_TYPE_ETHERNET_10G_LR (236739749): + 10G Ethernet, LR Optics. 
[(rate_bps) = 10000000000]; + """ + UNDEFINED_AVAILABLE_LINK_TYPES = 0 + LINK_TYPE_ETHERNET_100G_LR = 337672551 + LINK_TYPE_ETHERNET_10G_LR = 236739749 + class Continent(proto.Enum): r"""[Output Only] Continent for this location, which can take one of the following values: - AFRICA - ASIA_PAC - EUROPE - NORTH_AMERICA - @@ -45204,6 +46529,14 @@ class Status(proto.Enum): number=158459920, optional=True, ) + available_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=496344307, + ) + available_link_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=509504298, + ) city: str = proto.Field( proto.STRING, number=3053931, @@ -45425,6 +46758,170 @@ class LocationPresence(proto.Enum): ) +class InterconnectMacsec(proto.Message): + r"""Configuration information for enabling Media Access Control + security (MACsec) on this Cloud Interconnect connection between + Google and your on-premises router. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fail_open (bool): + If set to true, the Interconnect connection + is configured with a should-secure MACsec + security policy, that allows the Google router + to fallback to cleartext traffic if the MKA + session cannot be established. By default, the + Interconnect connection is configured with a + must-secure security policy that drops all + traffic if the MKA session cannot be established + with your router. + + This field is a member of `oneof`_ ``_fail_open``. + pre_shared_keys (MutableSequence[google.cloud.compute_v1.types.InterconnectMacsecPreSharedKey]): + Required. A keychain placeholder describing a + set of named key objects along with their start + times. A MACsec CKN/CAK is generated for each + key in the key chain. Google router + automatically picks the key with the most recent + startTime when establishing or re-establishing a + MACsec secure link. + """ + + fail_open: bool = proto.Field( + proto.BOOL, + number=532597451, + optional=True, + ) + pre_shared_keys: MutableSequence[ + "InterconnectMacsecPreSharedKey" + ] = proto.RepeatedField( + proto.MESSAGE, + number=420308466, + message="InterconnectMacsecPreSharedKey", + ) + + +class InterconnectMacsecConfig(proto.Message): + r"""MACsec configuration information for the Interconnect + connection. Contains the generated Connectivity Association Key + Name (CKN) and the key (CAK) for this Interconnect connection. + + Attributes: + pre_shared_keys (MutableSequence[google.cloud.compute_v1.types.InterconnectMacsecConfigPreSharedKey]): + A keychain placeholder describing a set of + named key objects along with their start times. + A MACsec CKN/CAK is generated for each key in + the key chain. Google router automatically picks + the key with the most recent startTime when + establishing or re-establishing a MACsec secure + link. + """ + + pre_shared_keys: MutableSequence[ + "InterconnectMacsecConfigPreSharedKey" + ] = proto.RepeatedField( + proto.MESSAGE, + number=420308466, + message="InterconnectMacsecConfigPreSharedKey", + ) + + +class InterconnectMacsecConfigPreSharedKey(proto.Message): + r"""Describes a pre-shared key used to setup MACsec in static + connectivity association key (CAK) mode. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cak (str): + An auto-generated Connectivity Association + Key (CAK) for this key. + + This field is a member of `oneof`_ ``_cak``. 
+ ckn (str): + An auto-generated Connectivity Association + Key Name (CKN) for this key. + + This field is a member of `oneof`_ ``_ckn``. + name (str): + User provided name for this pre-shared key. + + This field is a member of `oneof`_ ``_name``. + start_time (str): + User provided timestamp on or after which + this key is valid. + + This field is a member of `oneof`_ ``_start_time``. + """ + + cak: str = proto.Field( + proto.STRING, + number=98253, + optional=True, + ) + ckn: str = proto.Field( + proto.STRING, + number=98566, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + +class InterconnectMacsecPreSharedKey(proto.Message): + r"""Describes a pre-shared key used to setup MACsec in static + connectivity association key (CAK) mode. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. A name for this pre-shared key. The name must be + 1-63 characters long, and comply with RFC1035. Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + start_time (str): + A RFC3339 timestamp on or after which the key + is valid. startTime can be in the future. If the + keychain has a single key, startTime can be + omitted. If the keychain has multiple keys, + startTime is mandatory for each key. The start + times of keys must be in increasing order. The + start times of two consecutive keys must be at + least 6 hours apart. + + This field is a member of `oneof`_ ``_start_time``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + class InterconnectOutageNotification(proto.Message): r"""Description of a planned outage on this Interconnect. @@ -46184,6 +47681,34 @@ class InterconnectsGetDiagnosticsResponse(proto.Message): ) +class InterconnectsGetMacsecConfigResponse(proto.Message): + r"""Response for the InterconnectsGetMacsecConfigRequest. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + end_interface: MixerGetResponseWithEtagBuilder + + This field is a member of `oneof`_ ``_etag``. + result (google.cloud.compute_v1.types.InterconnectMacsecConfig): + + This field is a member of `oneof`_ ``_result``. + """ + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + result: "InterconnectMacsecConfig" = proto.Field( + proto.MESSAGE, + number=139315229, + optional=True, + message="InterconnectMacsecConfig", + ) + + class InvalidateCacheUrlMapRequest(proto.Message): r"""A request message for UrlMaps.InvalidateCache. See the method description for details. @@ -46694,16 +48219,15 @@ class ListAcceleratorTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. 
If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -46730,7 +48254,8 @@ class ListAcceleratorTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -46821,16 +48346,15 @@ class ListAddressesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -46857,7 +48381,8 @@ class ListAddressesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. 
You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -46970,16 +48495,15 @@ class ListAutoscalersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47006,7 +48530,8 @@ class ListAutoscalersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47098,16 +48623,15 @@ class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47134,7 +48658,8 @@ class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47225,16 +48750,15 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47261,7 +48785,8 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47346,16 +48871,15 @@ class ListBackendBucketsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. 
These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47382,7 +48906,8 @@ class ListBackendBucketsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47467,16 +48992,15 @@ class ListBackendServicesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47503,7 +49027,8 @@ class ListBackendServicesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47588,16 +49113,15 @@ class ListDiskTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47624,7 +49148,8 @@ class ListDiskTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -47715,16 +49240,15 @@ class ListDisksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47751,7 +49275,8 @@ class ListDisksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
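Since the rewritten filter docstrings stress that AIP-160 and legacy regular-expression filters cannot be mixed in one request, a short sketch may help; the project and zone values are placeholders.

# Sketch: an AIP-160 filter on a zonal Disks list call, mirroring the
# documented example syntax. Placeholders: "my-project", "us-central1-a".
from google.cloud import compute_v1

disks_client = compute_v1.DisksClient()

request = compute_v1.ListDisksRequest(
    project="my-project",
    zone="us-central1-a",
    # AIP-160 style: field name, operator (=, !=, >, <, <=, >=, :), value.
    filter="name != example-disk",
)

for disk in disks_client.list(request=request):
    print(disk.name)

# A legacy regex filter such as "name ne .*instance" would have to go in a
# separate request; the two filter styles cannot be combined in one call.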
max_results (int): @@ -47842,16 +49367,15 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -47878,7 +49402,8 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): @@ -47980,16 +49505,15 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48016,7 +49540,8 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): @@ -48117,16 +49642,15 @@ class ListExternalVpnGatewaysRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48153,7 +49677,8 @@ class ListExternalVpnGatewaysRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48238,16 +49763,15 @@ class ListFirewallPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48274,7 +49798,8 @@ class ListFirewallPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48365,16 +49890,15 @@ class ListFirewallsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48401,7 +49925,8 @@ class ListFirewallsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48486,16 +50011,15 @@ class ListForwardingRulesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48522,7 +50046,8 @@ class ListForwardingRulesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48613,16 +50138,15 @@ class ListGlobalAddressesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48649,7 +50173,8 @@ class ListGlobalAddressesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
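max_results only caps the page size; the generated pager keeps following next_page_token on its own. A short sketch with a placeholder project ID:

# Sketch: page-size control with max_results; the ListPager follows
# next_page_token transparently. "my-project" is a placeholder.
from google.cloud import compute_v1

addresses_client = compute_v1.GlobalAddressesClient()

request = compute_v1.ListGlobalAddressesRequest(
    project="my-project",
    max_results=50,  # per-page cap, not a limit on the total result count
)

# Item iteration crosses page boundaries automatically...
names = [address.name for address in addresses_client.list(request=request)]

# ...or iterate page by page if the raw list responses are needed.
for page in addresses_client.list(request=request).pages:
    print(len(page.items))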
max_results (int): @@ -48734,16 +50259,15 @@ class ListGlobalForwardingRulesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48770,7 +50294,8 @@ class ListGlobalForwardingRulesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48855,16 +50380,15 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -48891,7 +50415,8 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -48976,16 +50501,15 @@ class ListGlobalOperationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49012,7 +50536,8 @@ class ListGlobalOperationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49097,16 +50622,15 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49133,7 +50657,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49221,16 +50746,15 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49257,7 +50781,8 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49342,16 +50867,15 @@ class ListHealthChecksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49378,7 +50902,8 @@ class ListHealthChecksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49463,16 +50988,15 @@ class ListImagesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49499,7 +51023,8 @@ class ListImagesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
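The ``:*`` existence check called out in these docstrings is handy for label-based housekeeping; a sketch against the Images list call, with a placeholder project ID:

# Sketch: the ":*" comparison from the docstring above, used to find all
# images that carry an "owner" label. "my-project" is a placeholder.
from google.cloud import compute_v1

images_client = compute_v1.ImagesClient()

request = compute_v1.ListImagesRequest(
    project="my-project",
    filter="labels.owner:*",  # matches any image where the key is defined
)

for image in images_client.list(request=request):
    print(image.name, dict(image.labels).get("owner"))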
max_results (int): @@ -49584,16 +51109,15 @@ class ListInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49620,7 +51144,8 @@ class ListInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49712,16 +51237,15 @@ class ListInstanceGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49748,7 +51272,8 @@ class ListInstanceGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49840,16 +51365,15 @@ class ListInstanceTemplatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49876,7 +51400,8 @@ class ListInstanceTemplatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -49961,16 +51486,15 @@ class ListInstancesInstanceGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -49997,7 +51521,8 @@ class ListInstancesInstanceGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. instance_group (str): @@ -50103,16 +51628,15 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50139,7 +51663,8 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. instance_group (str): @@ -50244,16 +51769,15 @@ class ListInstancesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50280,7 +51804,8 @@ class ListInstancesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -50371,16 +51896,15 @@ class ListInterconnectAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50407,7 +51931,8 @@ class ListInterconnectAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
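For the legacy regular-expression form (eq/ne with Google RE2, matching the entire field), here is a sketch against the regional InterconnectAttachments list call; the project, region, and pattern are placeholders, and as the docstrings note such filters cannot span multiple fields or be mixed with AIP-160 filters.

# Sketch: legacy eq/ne regex filter (RE2, must match the whole field value).
# "my-project", "us-east4", and the pattern are placeholders.
from google.cloud import compute_v1

attachments_client = compute_v1.InterconnectAttachmentsClient()

request = compute_v1.ListInterconnectAttachmentsRequest(
    project="my-project",
    region="us-east4",
    filter="name ne .*-test",  # exclude attachments whose names end in -test
)

for attachment in attachments_client.list(request=request):
    print(attachment.name, attachment.state)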
max_results (int): @@ -50498,16 +52023,15 @@ class ListInterconnectLocationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50534,7 +52058,8 @@ class ListInterconnectLocationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -50619,16 +52144,15 @@ class ListInterconnectRemoteLocationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50655,7 +52179,8 @@ class ListInterconnectRemoteLocationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -50740,16 +52265,15 @@ class ListInterconnectsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50776,7 +52300,8 @@ class ListInterconnectsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -50861,16 +52386,15 @@ class ListLicensesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -50897,7 +52421,8 @@ class ListLicensesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -50982,16 +52507,15 @@ class ListMachineImagesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51018,7 +52542,8 @@ class ListMachineImagesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -51103,16 +52628,15 @@ class ListMachineTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51139,7 +52663,8 @@ class ListMachineTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -51231,16 +52756,15 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51267,7 +52791,8 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
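The same two filter grammars apply to the ``filter`` field of ``ListManagedInstancesInstanceGroupManagersRequest``. A minimal sketch, assuming placeholder project, zone, and managed instance group names; ``InstanceGroupManagersClient.list_managed_instances`` returns a pager over the managed instances:

    from google.cloud import compute_v1

    client = compute_v1.InstanceGroupManagersClient()

    request = compute_v1.ListManagedInstancesInstanceGroupManagersRequest(
        project="my-project",               # placeholder project ID
        zone="us-central1-a",               # placeholder zone
        instance_group_manager="my-mig",    # placeholder managed instance group
        max_results=100,                    # page size; the pager fetches further pages as needed
    )
    for managed_instance in client.list_managed_instances(request=request):
        print(managed_instance.instance, managed_instance.instance_status)
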
instance_group_manager (str): @@ -51366,16 +52891,15 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51402,7 +52926,8 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): @@ -51499,16 +53024,15 @@ class ListNetworkAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51535,7 +53059,8 @@ class ListNetworkAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -51626,16 +53151,15 @@ class ListNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51662,7 +53186,8 @@ class ListNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -51756,16 +53281,15 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51792,7 +53316,8 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -51887,16 +53412,15 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -51923,7 +53447,8 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52019,129 +53544,9 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): ) -class ListNetworkFirewallPoliciesRequest(proto.Message): - r"""A request message for NetworkFirewallPolicies.List. See the - method description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` - comparison can be used to test whether a key has been - defined. For example, to find all objects with ``owner`` - label use: ``labels.owner:*`` You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The - literal value is interpreted as a regular expression using - Google RE2 library syntax. The literal value must match the - entire field. For example, to filter for instances that do - not end with name "instance", you would use - ``name ne .*instance``. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. 
- - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - This field is a member of `oneof`_ ``_return_partial_success``. - """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - - -class ListNetworksRequest(proto.Message): - r"""A request message for Networks.List. See the method +class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for + RegionNetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -52153,16 +53558,15 @@ class ListNetworksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52189,7 +53593,8 @@ class ListNetworksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52201,6 +53606,11 @@ class ListNetworksRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. 
+ network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -52222,6 +53632,10 @@ class ListNetworksRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + The name of the region where the network + endpoint group is located. It should comply with + RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -52240,128 +53654,9 @@ class ListNetworksRequest(proto.Message): number=54715419, optional=True, ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - - -class ListNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.List. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` - comparison can be used to test whether a key has been - defined. For example, to find all objects with ``owner`` - label use: ``labels.owner:*`` You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. 
Examples: - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The - literal value is interpreted as a regular expression using - Google RE2 library syntax. The literal value must match the - entire field. For example, to filter for instances that do - not end with name "instance", you would use - ``name ne .*instance``. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. - """ - - filter: str = proto.Field( + network_endpoint_group: str = proto.Field( proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, + number=433907078, ) order_by: str = proto.Field( proto.STRING, @@ -52377,20 +53672,20 @@ class ListNodeGroupsRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListNodeTemplatesRequest(proto.Message): - r"""A request message for NodeTemplates.List. See the method - description for details. +class ListNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52401,16 +53696,15 @@ class ListNodeTemplatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52437,7 +53731,8 @@ class ListNodeTemplatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52470,8 +53765,6 @@ class ListNodeTemplatesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - The name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -52504,10 +53797,6 @@ class ListNodeTemplatesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -52515,8 +53804,8 @@ class ListNodeTemplatesRequest(proto.Message): ) -class ListNodeTypesRequest(proto.Message): - r"""A request message for NodeTypes.List. See the method +class ListNetworksRequest(proto.Message): + r"""A request message for Networks.List. See the method description for details. @@ -52528,16 +53817,15 @@ class ListNodeTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. 
The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52564,7 +53852,8 @@ class ListNodeTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52603,8 +53892,6 @@ class ListNodeTypesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -52636,14 +53923,10 @@ class ListNodeTypesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListNodesNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.ListNodes. See the method +class ListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.List. See the method description for details. @@ -52655,16 +53938,15 @@ class ListNodesNodeGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52691,7 +53973,8 @@ class ListNodesNodeGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
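The pagination fields described here (``max_results``, ``order_by``, ``page_token``) behave the same way across these list requests. A minimal sketch with ``ListNodeGroupsRequest``, assuming placeholder project and zone names; the GAPIC pager follows ``nextPageToken`` automatically, and ``pager.pages`` exposes the raw per-page responses when page boundaries matter:

    from google.cloud import compute_v1

    client = compute_v1.NodeGroupsClient()

    request = compute_v1.ListNodeGroupsRequest(
        project="my-project",               # placeholder project ID
        zone="us-central1-a",               # placeholder zone
        order_by="creationTimestamp desc",  # newest node groups first
        max_results=50,                     # per-page size; the API default is 500
    )
    pager = client.list(request=request)

    # Iterate page by page; each page is the underlying list response message.
    for page in pager.pages:
        for node_group in page.items:
            print(node_group.name, node_group.creation_timestamp)
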
max_results (int): @@ -52703,9 +53986,6 @@ class ListNodesNodeGroupsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - node_group (str): - Name of the NodeGroup resource whose nodes - you want to list. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -52747,10 +54027,6 @@ class ListNodesNodeGroupsRequest(proto.Message): number=54715419, optional=True, ) - node_group: str = proto.Field( - proto.STRING, - number=469958146, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -52776,8 +54052,8 @@ class ListNodesNodeGroupsRequest(proto.Message): ) -class ListPacketMirroringsRequest(proto.Message): - r"""A request message for PacketMirrorings.List. See the method +class ListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.List. See the method description for details. @@ -52789,16 +54065,15 @@ class ListPacketMirroringsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52825,7 +54100,8 @@ class ListPacketMirroringsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52859,7 +54135,7 @@ class ListPacketMirroringsRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region for this request. + The name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -52903,35 +54179,28 @@ class ListPacketMirroringsRequest(proto.Message): ) -class ListPeeringRoutesNetworksRequest(proto.Message): - r"""A request message for Networks.ListPeeringRoutes. See the - method description for details. 
+class ListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - direction (str): - The direction of the exchanged routes. - Check the Direction enum for the list of - possible values. - - This field is a member of `oneof`_ ``_direction``. filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -52958,7 +54227,8 @@ class ListPeeringRoutesNetworksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -52970,8 +54240,6 @@ class ListPeeringRoutesNetworksRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - network (str): - Name of the network for this request. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -52991,48 +54259,18 @@ class ListPeeringRoutesNetworksRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - peering_name (str): - The response will show routes exchanged over - the given peering connection. - - This field is a member of `oneof`_ ``_peering_name``. project (str): Project ID for this request. - region (str): - The region of the request. The response will - include all subnet routes, static routes and - dynamic routes in the region. - - This field is a member of `oneof`_ ``_region``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. This field is a member of `oneof`_ ``_return_partial_success``. 
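For the ``ListNetworkEndpointsRegionNetworkEndpointGroupsRequest`` message introduced earlier in this patch, a minimal sketch of constructing the request is below. The ``RegionNetworkEndpointGroupsClient.list_network_endpoints`` method name is assumed from the standard GAPIC naming for the new RegionNetworkEndpointGroups.ListNetworkEndpoints RPC, the resource names are placeholders, and the result items are assumed to mirror the zonal ListNetworkEndpoints response shape:

    from google.cloud import compute_v1

    client = compute_v1.RegionNetworkEndpointGroupsClient()

    request = compute_v1.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(
        project="my-project",             # placeholder project ID
        region="us-central1",             # placeholder region (RFC1035 name)
        network_endpoint_group="my-neg",  # placeholder regional NEG name
    )
    for item in client.list_network_endpoints(request=request):
        # Assumption: each item wraps a NetworkEndpoint, as in the zonal variant.
        print(item.network_endpoint.ip_address, item.network_endpoint.port)
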
+ zone (str): + The name of the zone for this request. """ - class Direction(proto.Enum): - r"""The direction of the exchanged routes. - - Values: - UNDEFINED_DIRECTION (0): - A value indicating that the enum field is not - set. - INCOMING (338552870): - For routes exported from peer network. - OUTGOING (307438444): - For routes exported from local network. - """ - UNDEFINED_DIRECTION = 0 - INCOMING = 338552870 - OUTGOING = 307438444 - - direction: str = proto.Field( - proto.STRING, - number=111150975, - optional=True, - ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -53043,10 +54281,6 @@ class Direction(proto.Enum): number=54715419, optional=True, ) - network: str = proto.Field( - proto.STRING, - number=232872494, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -53057,30 +54291,23 @@ class Direction(proto.Enum): number=19994697, optional=True, ) - peering_name: str = proto.Field( - proto.STRING, - number=249571370, - optional=True, - ) project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - optional=True, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): - r"""A request message for - InstanceGroupManagers.ListPerInstanceConfigs. See the method +class ListNodesNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.ListNodes. See the method description for details. @@ -53092,16 +54319,15 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53128,12 +54354,10 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. 
You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It - should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53143,6 +54367,9 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + node_group (str): + Name of the NodeGroup resource whose nodes + you want to list. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -53171,9 +54398,7 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of the zone where the managed - instance group is located. It should conform to - RFC1035. + The name of the zone for this request. """ filter: str = proto.Field( @@ -53181,15 +54406,15 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, optional=True, ) + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -53215,10 +54440,9 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): ) -class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for - RegionInstanceGroupManagers.ListPerInstanceConfigs. See the - method description for details. +class ListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53229,16 +54453,15 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53265,12 +54488,10 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It - should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53302,8 +54523,7 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request, - should conform to RFC1035. + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -53317,10 +54537,6 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53351,30 +54567,34 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): ) -class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): - r"""A request message for - SecurityPolicies.ListPreconfiguredExpressionSets. See the method - description for details. +class ListPeeringRoutesNetworksRequest(proto.Message): + r"""A request message for Networks.ListPeeringRoutes. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + direction (str): + The direction of the exchanged routes. + Check the Direction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_direction``. filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
+ For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53401,7 +54621,8 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -53413,6 +54634,8 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + network (str): + Name of the network for this request. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -53432,8 +54655,19 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. + peering_name (str): + The response will show routes exchanged over + the given peering connection. + + This field is a member of `oneof`_ ``_peering_name``. project (str): Project ID for this request. + region (str): + The region of the request. The response will + include all subnet routes, static routes and + dynamic routes in the region. + + This field is a member of `oneof`_ ``_region``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -53442,6 +54676,27 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. """ + class Direction(proto.Enum): + r"""The direction of the exchanged routes. + + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + INCOMING (338552870): + For routes exported from peer network. + OUTGOING (307438444): + For routes exported from local network. + """ + UNDEFINED_DIRECTION = 0 + INCOMING = 338552870 + OUTGOING = 307438444 + + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -53452,6 +54707,10 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): number=54715419, optional=True, ) + network: str = proto.Field( + proto.STRING, + number=232872494, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -53462,10 +54721,20 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): number=19994697, optional=True, ) + peering_name: str = proto.Field( + proto.STRING, + number=249571370, + optional=True, + ) project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -53473,9 +54742,10 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): ) -class ListPublicAdvertisedPrefixesRequest(proto.Message): - r"""A request message for PublicAdvertisedPrefixes.List. 
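# A usage sketch for the ListPeeringRoutesNetworksRequest shown above, assuming
# the NetworksClient.list_peering_routes method; all resource names are
# placeholders. direction takes the string values of the Direction enum.
from google.cloud import compute_v1

client = compute_v1.NetworksClient()
request = compute_v1.ListPeeringRoutesNetworksRequest(
    project="my-project",        # placeholder project ID
    network="my-network",        # placeholder network name
    region="us-central1",        # placeholder region for the returned routes
    peering_name="my-peering",   # placeholder peering connection name
    direction="INCOMING",        # routes exported from the peer network
)
for route in client.list_peering_routes(request=request):
    print(route.dest_range, route.type_)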
See the - method description for details. +class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ListPerInstanceConfigs. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53486,16 +54756,15 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53522,9 +54791,13 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53561,6 +54834,10 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. """ filter: str = proto.Field( @@ -53568,6 +54845,10 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53592,10 +54873,15 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for PublicDelegatedPrefixes.List. 
See the +class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. @@ -53607,16 +54893,15 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53643,9 +54928,13 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53677,7 +54966,8 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region of this request. + Name of the region scoping this request, + should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -53691,6 +54981,10 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53721,8 +55015,9 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): ) -class ListReferrersInstancesRequest(proto.Message): - r"""A request message for Instances.ListReferrers. See the method +class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): + r"""A request message for + SecurityPolicies.ListPreconfiguredExpressionSets. See the method description for details. 
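# A sketch of the per-instance-config list requests documented above, assuming
# the InstanceGroupManagersClient / RegionInstanceGroupManagersClient
# list_per_instance_configs methods; project, zone, region, and MIG names are
# placeholders.
from google.cloud import compute_v1

zonal_client = compute_v1.InstanceGroupManagersClient()
zonal_request = compute_v1.ListPerInstanceConfigsInstanceGroupManagersRequest(
    project="my-project",             # placeholder project ID
    zone="us-central1-a",             # placeholder zone (RFC1035 name)
    instance_group_manager="my-mig",  # placeholder managed instance group
)
for config in zonal_client.list_per_instance_configs(request=zonal_request):
    print(config.name)

# The regional variant is identical except that it is scoped by region:
regional_client = compute_v1.RegionInstanceGroupManagersClient()
regional_request = compute_v1.ListPerInstanceConfigsRegionInstanceGroupManagersRequest(
    project="my-project",             # placeholder project ID
    region="us-central1",             # placeholder region
    instance_group_manager="my-mig",  # placeholder managed instance group
)
for config in regional_client.list_per_instance_configs(request=regional_request):
    print(config.name)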
@@ -53734,16 +55029,15 @@ class ListReferrersInstancesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53770,13 +55064,10 @@ class ListReferrersInstancesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance (str): - Name of the target instance scoping this - request, or '-' if the request should span over - all instances in the container. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53813,8 +55104,6 @@ class ListReferrersInstancesRequest(proto.Message): default value is false. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -53822,10 +55111,6 @@ class ListReferrersInstancesRequest(proto.Message): number=336120696, optional=True, ) - instance: str = proto.Field( - proto.STRING, - number=18257045, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53850,15 +55135,132 @@ class ListReferrersInstancesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( + + +class ListPublicAdvertisedPrefixesRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( proto.STRING, - number=3744684, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, ) -class ListRegionAutoscalersRequest(proto.Message): - r"""A request message for RegionAutoscalers.List. See the method - description for details. +class ListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53869,16 +55271,15 @@ class ListRegionAutoscalersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -53905,7 +55306,8 @@ class ListRegionAutoscalersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -53939,7 +55341,7 @@ class ListRegionAutoscalersRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region of this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
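# A sketch for the ListPublicAdvertisedPrefixesRequest /
# ListPublicDelegatedPrefixesRequest messages above, assuming the
# PublicAdvertisedPrefixesClient.list and PublicDelegatedPrefixesClient.list
# methods; project and region values are placeholders.
from google.cloud import compute_v1

pap_client = compute_v1.PublicAdvertisedPrefixesClient()
pap_request = compute_v1.ListPublicAdvertisedPrefixesRequest(
    project="my-project",   # placeholder project ID
)
for prefix in pap_client.list(request=pap_request):
    print(prefix.name, prefix.ip_cidr_range)

# Delegated prefixes are regional, so their list request also carries a region.
pdp_client = compute_v1.PublicDelegatedPrefixesClient()
pdp_request = compute_v1.ListPublicDelegatedPrefixesRequest(
    project="my-project",   # placeholder project ID
    region="us-central1",   # placeholder region
)
for prefix in pdp_client.list(request=pdp_request):
    print(prefix.name)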
The @@ -53983,9 +55385,9 @@ class ListRegionAutoscalersRequest(proto.Message): ) -class ListRegionBackendServicesRequest(proto.Message): - r"""A request message for RegionBackendServices.List. See the - method description for details. +class ListReferrersInstancesRequest(proto.Message): + r"""A request message for Instances.ListReferrers. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53996,16 +55398,15 @@ class ListRegionBackendServicesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54032,7 +55433,270 @@ class ListRegionBackendServicesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + instance (str): + Name of the target instance scoping this + request, or '-' if the request should span over + all instances in the container. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. 
Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListRegionAutoscalersRequest(proto.Message): + r"""A request message for RegionAutoscalers.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
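# A usage sketch for the ListReferrersInstancesRequest laid out above, assuming
# the InstancesClient.list_referrers method; project and zone are placeholders.
from google.cloud import compute_v1

client = compute_v1.InstancesClient()
request = compute_v1.ListReferrersInstancesRequest(
    project="my-project",   # placeholder project ID
    zone="us-central1-a",   # placeholder zone
    instance="-",           # "-" spans all instances in the zone, per the field docs
)
for reference in client.list_referrers(request=request):
    print(reference.referrer, reference.target)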
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. 
The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54123,16 +55787,15 @@ class ListRegionCommitmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54159,7 +55822,8 @@ class ListRegionCommitmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54250,16 +55914,15 @@ class ListRegionDiskTypesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54286,7 +55949,8 @@ class ListRegionDiskTypesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54377,16 +56041,15 @@ class ListRegionDisksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54413,7 +56076,8 @@ class ListRegionDisksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54504,16 +56168,15 @@ class ListRegionHealthCheckServicesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54540,7 +56203,8 @@ class ListRegionHealthCheckServicesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54631,16 +56295,15 @@ class ListRegionHealthChecksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54667,7 +56330,8 @@ class ListRegionHealthChecksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -54758,16 +56422,15 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54794,7 +56457,8 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
max_results (int): @@ -54885,16 +56549,15 @@ class ListRegionInstanceGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -54921,7 +56584,8 @@ class ListRegionInstanceGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55012,16 +56676,15 @@ class ListRegionInstanceTemplatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55048,7 +56711,8 @@ class ListRegionInstanceTemplatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55139,16 +56803,15 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55175,7 +56838,8 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55268,16 +56932,15 @@ class ListRegionNetworkFirewallPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55304,7 +56967,8 @@ class ListRegionNetworkFirewallPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55395,16 +57059,15 @@ class ListRegionNotificationEndpointsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55431,7 +57094,8 @@ class ListRegionNotificationEndpointsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55522,16 +57186,15 @@ class ListRegionOperationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55558,7 +57221,8 @@ class ListRegionOperationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55649,16 +57313,15 @@ class ListRegionSecurityPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55685,7 +57348,8 @@ class ListRegionSecurityPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
max_results (int): @@ -55776,16 +57440,15 @@ class ListRegionSslCertificatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55812,7 +57475,8 @@ class ListRegionSslCertificatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -55903,16 +57567,15 @@ class ListRegionSslPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -55939,7 +57602,8 @@ class ListRegionSslPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56030,16 +57694,15 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56066,7 +57729,8 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56157,16 +57821,15 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
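The ``:*`` comparison documented above tests whether a key has been defined. A hedged sketch of using it to find labelled resources, with placeholder project, zone, and label key:

from google.cloud import compute_v1

client = compute_v1.InstancesClient()
request = compute_v1.ListInstancesRequest(
    project="my-project",      # placeholder project ID
    zone="us-central1-a",      # placeholder zone
    filter="labels.owner:*",   # instances that define an "owner" label, per the docstring above
)
for instance in client.list(request=request):
    print(instance.name)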
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56193,7 +57856,8 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56284,16 +57948,15 @@ class ListRegionTargetTcpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56320,7 +57983,8 @@ class ListRegionTargetTcpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56411,16 +58075,15 @@ class ListRegionUrlMapsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56447,7 +58110,8 @@ class ListRegionUrlMapsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56538,16 +58202,15 @@ class ListRegionsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56574,7 +58237,8 @@ class ListRegionsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
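The legacy ``eq``/``ne`` regular-expression form goes through the same ``filter`` string and, per the updated note, cannot combine constraints on multiple fields. A rough sketch, assuming a placeholder project ID and pattern:

from google.cloud import compute_v1

client = compute_v1.RegionsClient()
request = compute_v1.ListRegionsRequest(
    project="my-project",  # placeholder project ID
    # RE2-based legacy filter; the literal must match the entire field,
    # so this skips every region whose name starts with "europe-".
    filter="name ne europe-.*",
)
for region in client.list(request=request):
    print(region.name)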
max_results (int): @@ -56659,16 +58323,15 @@ class ListReservationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56695,7 +58358,8 @@ class ListReservationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56786,16 +58450,15 @@ class ListResourcePoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56822,7 +58485,8 @@ class ListResourcePoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -56913,16 +58577,15 @@ class ListRoutersRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -56949,7 +58612,8 @@ class ListRoutersRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57040,16 +58704,15 @@ class ListRoutesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57076,7 +58739,8 @@ class ListRoutesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57161,16 +58825,15 @@ class ListSecurityPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57197,7 +58860,8 @@ class ListSecurityPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57282,16 +58946,15 @@ class ListServiceAttachmentsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57318,7 +58981,8 @@ class ListServiceAttachmentsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57409,16 +59073,15 @@ class ListSnapshotsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57445,7 +59108,8 @@ class ListSnapshotsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
max_results (int): @@ -57530,16 +59194,15 @@ class ListSslCertificatesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57566,7 +59229,8 @@ class ListSslCertificatesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57651,16 +59315,15 @@ class ListSslPoliciesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57687,7 +59350,8 @@ class ListSslPoliciesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57772,16 +59436,15 @@ class ListSubnetworksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57808,7 +59471,8 @@ class ListSubnetworksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -57899,16 +59563,15 @@ class ListTargetGrpcProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -57935,7 +59598,8 @@ class ListTargetGrpcProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58020,16 +59684,15 @@ class ListTargetHttpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58056,7 +59719,8 @@ class ListTargetHttpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58141,16 +59805,15 @@ class ListTargetHttpsProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. 
The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58177,7 +59840,8 @@ class ListTargetHttpsProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58262,16 +59926,15 @@ class ListTargetInstancesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58298,7 +59961,8 @@ class ListTargetInstancesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. 
max_results (int): @@ -58389,16 +60053,15 @@ class ListTargetPoolsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58425,7 +60088,8 @@ class ListTargetPoolsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58516,16 +60180,15 @@ class ListTargetSslProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. 
For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58552,7 +60215,8 @@ class ListTargetSslProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58637,16 +60301,15 @@ class ListTargetTcpProxiesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58673,7 +60336,8 @@ class ListTargetTcpProxiesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58758,16 +60422,15 @@ class ListTargetVpnGatewaysRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58794,7 +60457,8 @@ class ListTargetVpnGatewaysRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58885,16 +60549,15 @@ class ListUrlMapsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -58921,7 +60584,8 @@ class ListUrlMapsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -58993,9 +60657,9 @@ class ListUrlMapsRequest(proto.Message): ) -class ListUsableSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.ListUsable. See the method - description for details. +class ListUsableBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.ListUsable. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -59006,16 +60670,15 @@ class ListUsableSubnetworksRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59042,7 +60705,8 @@ class ListUsableSubnetworksRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -59114,9 +60778,9 @@ class ListUsableSubnetworksRequest(proto.Message): ) -class ListVpnGatewaysRequest(proto.Message): - r"""A request message for VpnGateways.List. See the method - description for details. +class ListUsableRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.ListUsable. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -59127,16 +60791,15 @@ class ListVpnGatewaysRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59163,7 +60826,8 @@ class ListVpnGatewaysRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -59197,7 +60861,9 @@ class ListVpnGatewaysRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region for this request. + Name of the region scoping this request. It + must be a string that meets the requirements in + RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -59241,8 +60907,8 @@ class ListVpnGatewaysRequest(proto.Message): ) -class ListVpnTunnelsRequest(proto.Message): - r"""A request message for VpnTunnels.List. See the method +class ListUsableSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.ListUsable. See the method description for details. @@ -59254,16 +60920,15 @@ class ListVpnTunnelsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59290,7 +60955,8 @@ class ListVpnTunnelsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. 
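``ListUsableBackendServicesRequest`` and ``ListUsableRegionBackendServicesRequest`` back the BackendServices.ListUsable and RegionBackendServices.ListUsable calls introduced in this revision. A sketch of the regional variant, assuming the generated method follows the usual GAPIC naming (``list_usable``) and using placeholder project/region values:

    from google.cloud import compute_v1

    client = compute_v1.RegionBackendServicesClient()

    request = compute_v1.ListUsableRegionBackendServicesRequest(
        project="my-project",  # placeholder
        region="us-central1",  # placeholder; must satisfy RFC1035
    )

    # The returned pager follows nextPageToken across pages automatically.
    for backend_service in client.list_usable(request=request):
        print(backend_service.name)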
For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -59323,8 +60989,6 @@ class ListVpnTunnelsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -59357,10 +61021,6 @@ class ListVpnTunnelsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -59368,8 +61028,8 @@ class ListVpnTunnelsRequest(proto.Message): ) -class ListXpnHostsProjectsRequest(proto.Message): - r"""A request message for Projects.ListXpnHosts. See the method +class ListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.List. See the method description for details. @@ -59381,16 +61041,15 @@ class ListXpnHostsProjectsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59417,7 +61076,262 @@ class ListXpnHostsProjectsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListVpnTunnelsRequest(proto.Message): + r"""A request message for VpnTunnels.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. 
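The ``max_results``, ``order_by``, and ``page_token`` fields described above behave identically across these list requests. A short sketch with placeholder project/region values; the pager returned by the client issues the follow-up requests with ``nextPageToken``, so ``page_token`` rarely needs to be set by hand:

    from google.cloud import compute_v1

    client = compute_v1.VpnGatewaysClient()

    request = compute_v1.ListVpnGatewaysRequest(
        project="my-project",               # placeholder
        region="us-central1",               # placeholder
        max_results=50,                     # page size, 0 to 500 (default 500)
        order_by="creationTimestamp desc",  # newest resources first
    )

    for gateway in client.list(request=request):
        print(gateway.name, gateway.creation_timestamp)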
+ However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListXpnHostsProjectsRequest(proto.Message): + r"""A request message for Projects.ListXpnHosts. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -59511,16 +61425,15 @@ class ListZoneOperationsRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. 
The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59547,7 +61460,8 @@ class ListZoneOperationsRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -59638,16 +61552,15 @@ class ListZonesRequest(proto.Message): response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal - AIP-160. If you want to use AIP-160, your expression must - specify the field name, an operator, and the value that you - want to use for filtering. The value must be a string, a - number, or a boolean. The operator must be either ``=``, - ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, - if you are filtering Compute Engine instances, you can - exclude instances named ``example-instance`` by specifying - ``name != example-instance``. The ``:`` operator can be used - with string fields to match substrings. For non-string - fields it is equivalent to the ``=`` operator. The ``:*`` + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` comparison can be used to test whether a key has been defined. For example, to find all objects with ``owner`` label use: ``labels.owner:*`` You can also filter nested @@ -59674,7 +61587,8 @@ class ListZonesRequest(proto.Message): Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name "instance", you would use - ``name ne .*instance``. + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. max_results (int): @@ -60891,6 +62805,11 @@ class ManagedInstance(proto.Message): or delete the instance. This field is a member of `oneof`_ ``_last_attempt``. + name (str): + [Output Only] The name of the instance. The name always + exists even if the instance has not yet been created. + + This field is a member of `oneof`_ ``_name``. preserved_state_from_config (google.cloud.compute_v1.types.PreservedState): [Output Only] Preserved state applied from per-instance config for this instance. 
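The new ``ManagedInstance.name`` field is populated even while the backing VM is still being created, which makes it easier to correlate pending actions with instance names. A sketch of reading it from a managed instance group, with placeholder project, zone, and group names:

    from google.cloud import compute_v1

    client = compute_v1.InstanceGroupManagersClient()

    # Pages through the managed instances of the group.
    for managed_instance in client.list_managed_instances(
        project="my-project",             # placeholder
        zone="us-central1-a",             # placeholder
        instance_group_manager="my-mig",  # placeholder
    ):
        # name exists even before the instance has been created.
        print(managed_instance.name, managed_instance.current_action)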
@@ -61082,6 +63001,11 @@ class InstanceStatus(proto.Enum): optional=True, message="ManagedInstanceLastAttempt", ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) preserved_state_from_config: "PreservedState" = proto.Field( proto.MESSAGE, number=98661858, @@ -61673,6 +63597,127 @@ class NamedPort(proto.Message): ) +class NatIpInfo(proto.Message): + r"""Contains NAT IP information of a NAT config (i.e. usage + status, mode). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + nat_ip_info_mappings (MutableSequence[google.cloud.compute_v1.types.NatIpInfoNatIpInfoMapping]): + A list of all NAT IPs assigned to this NAT + config. + nat_name (str): + Name of the NAT config which the NAT IP + belongs to. + + This field is a member of `oneof`_ ``_nat_name``. + """ + + nat_ip_info_mappings: MutableSequence[ + "NatIpInfoNatIpInfoMapping" + ] = proto.RepeatedField( + proto.MESSAGE, + number=241401884, + message="NatIpInfoNatIpInfoMapping", + ) + nat_name: str = proto.Field( + proto.STRING, + number=425596649, + optional=True, + ) + + +class NatIpInfoNatIpInfoMapping(proto.Message): + r"""Contains information of a NAT IP. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (str): + Specifies whether NAT IP is auto or manual. + Check the Mode enum for the list of possible + values. + + This field is a member of `oneof`_ ``_mode``. + nat_ip (str): + NAT IP address. For example: 203.0.113.11. + + This field is a member of `oneof`_ ``_nat_ip``. + usage (str): + Specifies whether NAT IP is currently serving + at least one endpoint or not. Check the Usage + enum for the list of possible values. + + This field is a member of `oneof`_ ``_usage``. + """ + + class Mode(proto.Enum): + r"""Specifies whether NAT IP is auto or manual. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + AUTO (2020783): + No description available. + MANUAL (119397318): + No description available. + """ + UNDEFINED_MODE = 0 + AUTO = 2020783 + MANUAL = 119397318 + + class Usage(proto.Enum): + r"""Specifies whether NAT IP is currently serving at least one + endpoint or not. + + Values: + UNDEFINED_USAGE (0): + A value indicating that the enum field is not + set. + IN_USE (17393485): + No description available. + UNUSED (360643030): + No description available. + """ + UNDEFINED_USAGE = 0 + IN_USE = 17393485 + UNUSED = 360643030 + + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) + nat_ip: str = proto.Field( + proto.STRING, + number=21113093, + optional=True, + ) + usage: str = proto.Field( + proto.STRING, + number=111574433, + optional=True, + ) + + +class NatIpInfoResponse(proto.Message): + r""" + + Attributes: + result (MutableSequence[google.cloud.compute_v1.types.NatIpInfo]): + [Output Only] A list of NAT IP information. + """ + + result: MutableSequence["NatIpInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=139315229, + message="NatIpInfo", + ) + + class Network(proto.Message): r"""Represents a VPC Network resource. Networks connect resources to each other and to the internet. For more information, read @@ -62186,6 +64231,13 @@ class NetworkAttachmentConnectedEndpoint(proto.Message): range in case of Serverless. This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + The IPv6 address assigned to the producer instance network + interface. 
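The ``NatIpInfo`` messages above carry the per-NAT IP data returned by the Routers.GetNatIpInfo call added in this revision. A sketch under the assumption that the generated method is named ``get_nat_ip_info`` with the usual flattened ``project``/``region``/``router`` parameters (all placeholder values here):

    from google.cloud import compute_v1

    client = compute_v1.RoutersClient()

    # Assumed method name for the new Routers.GetNatIpInfo RPC.
    response = client.get_nat_ip_info(
        project="my-project",      # placeholder
        region="us-central1",      # placeholder
        router="my-cloud-router",  # placeholder
    )

    for nat_info in response.result:
        print("NAT config:", nat_info.nat_name)
        for mapping in nat_info.nat_ip_info_mappings:
            # mode is AUTO or MANUAL; usage is IN_USE or UNUSED.
            print(" ", mapping.nat_ip, mapping.mode, mapping.usage)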
This is only assigned when the stack types of + both the instance network interface and the consumer subnet + are IPv4_IPv6. + + This field is a member of `oneof`_ ``_ipv6_address``. project_id_or_num (str): The project id or number of the interface to which the IP was assigned. @@ -62204,6 +64256,11 @@ class NetworkAttachmentConnectedEndpoint(proto.Message): producer instance network interface. This field is a member of `oneof`_ ``_subnetwork``. + subnetwork_cidr_range (str): + [Output Only] The CIDR range of the subnet from which the + IPv4 internal IP was allocated from. + + This field is a member of `oneof`_ ``_subnetwork_cidr_range``. """ class Status(proto.Enum): @@ -62245,6 +64302,11 @@ class Status(proto.Enum): number=406272220, optional=True, ) + ipv6_address: str = proto.Field( + proto.STRING, + number=341563804, + optional=True, + ) project_id_or_num: str = proto.Field( proto.STRING, number=349783336, @@ -62264,6 +64326,11 @@ class Status(proto.Enum): number=307827694, optional=True, ) + subnetwork_cidr_range: str = proto.Field( + proto.STRING, + number=383249827, + optional=True, + ) class NetworkAttachmentList(proto.Message): @@ -62693,10 +64760,8 @@ class NetworkEndpointGroup(proto.Message): r"""Represents a collection of network endpoints. A network endpoint group (NEG) defines how a set of endpoints should be reached, whether they are reachable, and where they are located. - For more information about using NEGs, see Setting up external - HTTP(S) Load Balancing with internet NEGs, Setting up zonal - NEGs, or Setting up external HTTP(S) Load Balancing with - serverless NEGs. + For more information about using NEGs for different use cases, + see Network endpoint groups overview. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -66271,7 +68336,7 @@ class Operation(proto.Message): global, regional or zonal. - For global operations, use the ``globalOperations`` resource. - For regional operations, use the ``regionOperations`` resource. - For zonal operations, use the - ``zonalOperations`` resource. For more information, read Global, + ``zoneOperations`` resource. For more information, read Global, Regional, and Zonal Resources. @@ -66324,6 +68389,9 @@ class Operation(proto.Message): This value is in RFC3339 text format. This field is a member of `oneof`_ ``_insert_time``. + instances_bulk_insert_operation_metadata (google.cloud.compute_v1.types.InstancesBulkInsertOperationMetadata): + + This field is a member of `oneof`_ ``_instances_bulk_insert_operation_metadata``. kind (str): [Output Only] Type of the resource. Always ``compute#operation`` for Operation resources. @@ -66363,6 +68431,12 @@ class Operation(proto.Message): [Output Only] Server-defined URL for the resource. This field is a member of `oneof`_ ``_self_link``. + set_common_instance_metadata_operation_metadata (google.cloud.compute_v1.types.SetCommonInstanceMetadataOperationMetadata): + [Output Only] If the operation is for + projects.setCommonInstanceMetadata, this field will contain + information on all underlying zonal actions and their state. + + This field is a member of `oneof`_ ``_set_common_instance_metadata_operation_metadata``. start_time (str): [Output Only] The time that this operation was started by the server. This value is in RFC3339 text format. @@ -66392,7 +68466,8 @@ class Operation(proto.Message): This field is a member of `oneof`_ ``_target_link``. 
user (str): [Output Only] User who requested the operation, for example: - ``user@example.com``. + ``user@example.com`` or + ``alice_smith_identifier (global/workforcePools/example-com-us-employees)``. This field is a member of `oneof`_ ``_user``. warnings (MutableSequence[google.cloud.compute_v1.types.Warnings]): @@ -66472,6 +68547,14 @@ class Status(proto.Enum): number=433722515, optional=True, ) + instances_bulk_insert_operation_metadata: "InstancesBulkInsertOperationMetadata" = ( + proto.Field( + proto.MESSAGE, + number=89146177, + optional=True, + message="InstancesBulkInsertOperationMetadata", + ) + ) kind: str = proto.Field( proto.STRING, number=3292052, @@ -66507,6 +68590,12 @@ class Status(proto.Enum): number=456214797, optional=True, ) + set_common_instance_metadata_operation_metadata: "SetCommonInstanceMetadataOperationMetadata" = proto.Field( + proto.MESSAGE, + number=490378980, + optional=True, + message="SetCommonInstanceMetadataOperationMetadata", + ) start_time: str = proto.Field( proto.STRING, number=37467274, @@ -68221,19 +70310,137 @@ class PatchInstanceGroupManagerRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - zone (str): - The name of the zone where you want to create - the managed instance group. + zone (str): + The name of the zone where you want to create + the managed instance group. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_manager_resource: "InstanceGroupManager" = proto.Field( + proto.MESSAGE, + number=261063946, + message="InstanceGroupManager", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class PatchInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment (str): + Name of the interconnect attachment to patch. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
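The ``request_id`` field documented here (and on the other mutating requests in this file) is an idempotency token: reuse the same UUID when retrying and the server ignores the duplicate. A minimal sketch with placeholder resource names:

    import uuid

    from google.cloud import compute_v1

    client = compute_v1.InterconnectAttachmentsClient()

    request = compute_v1.PatchInterconnectAttachmentRequest(
        project="my-project",                     # placeholder
        region="us-central1",                     # placeholder
        interconnect_attachment="my-attachment",  # placeholder
        interconnect_attachment_resource=compute_v1.InterconnectAttachment(
            description="updated description",
        ),
        # Generate once; reuse the same value if the call has to be retried.
        request_id=str(uuid.uuid4()),
    )

    operation = client.patch(request=request)
    operation.result()  # block until the operation completes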
+ """ + + interconnect_attachment: str = proto.Field( + proto.STRING, + number=308135284, + ) + interconnect_attachment_resource: "InterconnectAttachment" = proto.Field( + proto.MESSAGE, + number=212341369, + message="InterconnectAttachment", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect (str): + Name of the interconnect to update. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ - instance_group_manager: str = proto.Field( + interconnect: str = proto.Field( proto.STRING, - number=249363395, + number=224601230, ) - instance_group_manager_resource: "InstanceGroupManager" = proto.Field( + interconnect_resource: "Interconnect" = proto.Field( proto.MESSAGE, - number=261063946, - message="InstanceGroupManager", + number=397611167, + message="Interconnect", ) project: str = proto.Field( proto.STRING, @@ -68244,56 +70451,51 @@ class PatchInstanceGroupManagerRequest(proto.Message): number=37109963, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class PatchInterconnectAttachmentRequest(proto.Message): - r"""A request message for InterconnectAttachments.Patch. See the +class PatchNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.Patch. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - interconnect_attachment (str): - Name of the interconnect attachment to patch. - interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + network_attachment (str): + Name of the NetworkAttachment resource to + patch. + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): The body resource for this request project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. 
For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. For example, consider a situation where you + make an initial request and the request times out. If you + make the request again with the same request ID, the server + can check if original operation with the same request ID was + received, and if so, will ignore the second request. This + prevents clients from accidentally creating duplicate + commitments. The request ID must be a valid UUID with the + exception that zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder This field is a member of `oneof`_ ``_request_id``. """ - interconnect_attachment: str = proto.Field( + network_attachment: str = proto.Field( proto.STRING, - number=308135284, + number=224644052, ) - interconnect_attachment_resource: "InterconnectAttachment" = proto.Field( + network_attachment_resource: "NetworkAttachment" = proto.Field( proto.MESSAGE, - number=212341369, - message="InterconnectAttachment", + number=210974745, + message="NetworkAttachment", ) project: str = proto.Field( proto.STRING, @@ -68310,60 +70512,6 @@ class PatchInterconnectAttachmentRequest(proto.Message): ) -class PatchInterconnectRequest(proto.Message): - r"""A request message for Interconnects.Patch. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - interconnect (str): - Name of the interconnect to update. - interconnect_resource (google.cloud.compute_v1.types.Interconnect): - The body resource for this request - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. 
- """ - - interconnect: str = proto.Field( - proto.STRING, - number=224601230, - ) - interconnect_resource: "Interconnect" = proto.Field( - proto.MESSAGE, - number=397611167, - message="Interconnect", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - class PatchNetworkEdgeSecurityServiceRequest(proto.Message): r"""A request message for NetworkEdgeSecurityServices.Patch. See the method description for details. @@ -69314,6 +71462,11 @@ class PatchRegionSecurityPolicyRequest(proto.Message): Name of the security policy to update. security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. """ project: str = proto.Field( @@ -69338,6 +71491,11 @@ class PatchRegionSecurityPolicyRequest(proto.Message): number=216159612, message="SecurityPolicy", ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) class PatchRegionSslPolicyRequest(proto.Message): @@ -69830,6 +71988,71 @@ class PatchRuleRegionNetworkFirewallPolicyRequest(proto.Message): ) +class PatchRuleRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.PatchRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + security_policy (str): + Name of the security policy to update. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_rule_resource: "SecurityPolicyRule" = proto.Field( + proto.MESSAGE, + number=402693443, + message="SecurityPolicyRule", + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + class PatchRuleSecurityPolicyRequest(proto.Message): r"""A request message for SecurityPolicies.PatchRule. See the method description for details. @@ -69848,6 +72071,11 @@ class PatchRuleSecurityPolicyRequest(proto.Message): Name of the security policy to update. security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. validate_only (bool): If true, the request will not be committed. 
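The ``update_mask`` added to the security-policy patch requests limits which rule fields are cleared or updated, and ``validate_only`` dry-runs the change without committing it. A sketch against the regional PatchRule method introduced in this revision, assuming the usual GAPIC method name ``patch_rule``; resource names and the mask value are placeholders:

    from google.cloud import compute_v1

    client = compute_v1.RegionSecurityPoliciesClient()

    request = compute_v1.PatchRuleRegionSecurityPolicyRequest(
        project="my-project",         # placeholder
        region="us-central1",         # placeholder
        security_policy="my-policy",  # placeholder
        priority=1000,                # priority of the rule to patch
        security_policy_rule_resource=compute_v1.SecurityPolicyRule(
            description="tightened rule",
        ),
        update_mask="description",    # placeholder mask
        validate_only=True,           # dry run only; nothing is committed
    )

    client.patch_rule(request=request)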
@@ -69872,6 +72100,11 @@ class PatchRuleSecurityPolicyRequest(proto.Message): number=402693443, message="SecurityPolicyRule", ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) validate_only: bool = proto.Field( proto.BOOL, number=242744629, @@ -69911,6 +72144,11 @@ class PatchSecurityPolicyRequest(proto.Message): Name of the security policy to update. security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. """ project: str = proto.Field( @@ -69931,6 +72169,11 @@ class PatchSecurityPolicyRequest(proto.Message): number=216159612, message="SecurityPolicy", ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) class PatchServiceAttachmentRequest(proto.Message): @@ -69997,6 +72240,64 @@ class PatchServiceAttachmentRequest(proto.Message): ) +class PatchSnapshotSettingRequest(proto.Message): + r"""A request message for SnapshotSettingsService.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): + The body resource for this request + update_mask (str): + update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot_settings_resource: "SnapshotSettings" = proto.Field( + proto.MESSAGE, + number=357664495, + message="SnapshotSettings", + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + class PatchSslPolicyRequest(proto.Message): r"""A request message for SslPolicies.Patch. See the method description for details. @@ -70371,8 +72672,8 @@ class PathMatcher(proto.Message): defaultRouteAction cannot contain any weightedBackendServices. Only one of defaultRouteAction or defaultUrlRedirect must be - set. URL maps for Classic external HTTP(S) load - balancers only support the urlRewrite action + set. URL maps for classic Application Load + Balancers only support the urlRewrite action within a path matcher's defaultRouteAction. This field is a member of `oneof`_ ``_default_route_action``. @@ -70524,9 +72825,9 @@ class PathRule(proto.Message): set. Conversely if service is set, routeAction cannot contain any weightedBackendServices. 
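``PatchSnapshotSettingRequest`` above belongs to the SnapshotSettings service added in this revision; its ``update_mask`` names the settings fields being updated. A sketch assuming the generated client is ``SnapshotSettingsServiceClient`` and that the settings resource exposes a ``storage_location.policy`` field (both assumptions; the project and policy value are placeholders):

    from google.cloud import compute_v1

    client = compute_v1.SnapshotSettingsServiceClient()

    request = compute_v1.PatchSnapshotSettingRequest(
        project="my-project",  # placeholder
        snapshot_settings_resource=compute_v1.SnapshotSettings(
            # Assumed field layout of the SnapshotSettings resource.
            storage_location=compute_v1.SnapshotSettingsStorageLocationSettings(
                policy="NEAREST_MULTI_REGION",
            ),
        ),
        update_mask="storageLocation.policy",  # assumed mask path
    )

    operation = client.patch(request=request)
    operation.result()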
Only one of routeAction or urlRedirect must be set. - URL maps for Classic external HTTP(S) load - balancers only support the urlRewrite action - within a path rule's routeAction. + URL maps for classic Application Load Balancers + only support the urlRewrite action within a path + rule's routeAction. This field is a member of `oneof`_ ``_route_action``. service (str): @@ -70698,25 +72999,10 @@ class Policy(proto.Message): resource, or both. To learn which resources support conditions in their IAM policies, see the `IAM documentation `__. - **JSON example:** { "bindings": [ { "role": - "roles/resourcemanager.organizationAdmin", "members": [ - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { - "role": "roles/resourcemanager.organizationViewer", "members": [ - "user:eve@example.com" ], "condition": { "title": "expirable - access", "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": - "BwWWja0YfJA=", "version": 3 } **YAML example:** bindings: - - members: - user:mike@example.com - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com role: - roles/resourcemanager.organizationAdmin - members: - - user:eve@example.com role: roles/resourcemanager.organizationViewer - condition: title: expirable access description: Does not grant - access after Sep 2020 expression: request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` For a description of IAM and its features, see the `IAM documentation `__. @@ -70843,6 +73129,14 @@ class PreservedState(proto.Message): Preserved disks defined for this instance. This map is keyed with the device names of the disks. + external_i_ps (MutableMapping[str, google.cloud.compute_v1.types.PreservedStatePreservedNetworkIp]): + Preserved external IPs defined for this + instance. This map is keyed with the name of the + network interface. + internal_i_ps (MutableMapping[str, google.cloud.compute_v1.types.PreservedStatePreservedNetworkIp]): + Preserved internal IPs defined for this + instance. This map is keyed with the name of the + network interface. metadata (MutableMapping[str, str]): Preserved metadata defined for this instance. 
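The new ``external_i_ps`` and ``internal_i_ps`` maps let a stateful MIG per-instance config preserve network IPs in addition to disks and metadata; the ``PreservedStatePreservedNetworkIp`` message they refer to is defined a little further below. A sketch of building such a preserved state (interface name, address, and instance name are placeholders):

    from google.cloud import compute_v1

    preserved_state = compute_v1.PreservedState(
        internal_i_ps={
            # Keyed by network interface name.
            "nic0": compute_v1.PreservedStatePreservedNetworkIp(
                auto_delete="NEVER",
                ip_address=compute_v1.PreservedStatePreservedNetworkIpIpAddress(
                    literal="10.128.0.42",  # placeholder internal IPv4 address
                ),
            ),
        },
        metadata={"configured-by": "example"},  # placeholder metadata
    )

    per_instance_config = compute_v1.PerInstanceConfig(
        name="my-instance",  # placeholder; matches an instance in the group
        preserved_state=preserved_state,
    )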
""" @@ -70853,6 +73147,22 @@ class PreservedState(proto.Message): number=95594102, message="PreservedStatePreservedDisk", ) + external_i_ps: MutableMapping[ + str, "PreservedStatePreservedNetworkIp" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=532687245, + message="PreservedStatePreservedNetworkIp", + ) + internal_i_ps: MutableMapping[ + str, "PreservedStatePreservedNetworkIp" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=215731675, + message="PreservedStatePreservedNetworkIp", + ) metadata: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -70948,6 +73258,92 @@ class Mode(proto.Enum): ) +class PreservedStatePreservedNetworkIp(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (str): + These stateful IPs will never be released + during autohealing, update or VM instance + recreate operations. This flag is used to + configure if the IP reservation should be + deleted after it is no longer used by the group, + e.g. when the given instance or the whole group + is deleted. Check the AutoDelete enum for the + list of possible values. + + This field is a member of `oneof`_ ``_auto_delete``. + ip_address (google.cloud.compute_v1.types.PreservedStatePreservedNetworkIpIpAddress): + Ip address representation + + This field is a member of `oneof`_ ``_ip_address``. + """ + + class AutoDelete(proto.Enum): + r"""These stateful IPs will never be released during autohealing, + update or VM instance recreate operations. This flag is used to + configure if the IP reservation should be deleted after it is no + longer used by the group, e.g. when the given instance or the + whole group is deleted. + + Values: + UNDEFINED_AUTO_DELETE (0): + A value indicating that the enum field is not + set. + NEVER (74175084): + No description available. + ON_PERMANENT_INSTANCE_DELETION (95727719): + No description available. + """ + UNDEFINED_AUTO_DELETE = 0 + NEVER = 74175084 + ON_PERMANENT_INSTANCE_DELETION = 95727719 + + auto_delete: str = proto.Field( + proto.STRING, + number=464761403, + optional=True, + ) + ip_address: "PreservedStatePreservedNetworkIpIpAddress" = proto.Field( + proto.MESSAGE, + number=406272220, + optional=True, + message="PreservedStatePreservedNetworkIpIpAddress", + ) + + +class PreservedStatePreservedNetworkIpIpAddress(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + The URL of the reservation for this IP + address. + + This field is a member of `oneof`_ ``_address``. + literal (str): + An IPv4 internal network address to assign to + the instance for this network interface. + + This field is a member of `oneof`_ ``_literal``. + """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + optional=True, + ) + literal: str = proto.Field( + proto.STRING, + number=182460591, + optional=True, + ) + + class PreviewRouterRequest(proto.Message): r"""A request message for Routers.Preview. See the method description for details. @@ -71377,6 +73773,11 @@ class PublicAdvertisedPrefix(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + byoip_api_version (str): + [Output Only] The version of BYOIP API. Check the + ByoipApiVersion enum for the list of possible values. 
+ + This field is a member of `oneof`_ ``_byoip_api_version``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -71433,6 +73834,18 @@ class PublicAdvertisedPrefix(proto.Message): except the last character, which cannot be a dash. This field is a member of `oneof`_ ``_name``. + pdp_scope (str): + Specifies how child public delegated prefix will be scoped. + It could be one of following values: - ``REGIONAL``: The + public delegated prefix is regional only. The provisioning + will take a few minutes. - ``GLOBAL``: The public delegated + prefix is global only. The provisioning will take ~4 weeks. + - ``GLOBAL_AND_REGIONAL`` [output only]: The public + delegated prefixes is BYOIP V1 legacy prefix. This is output + only value and no longer supported in BYOIP V2. Check the + PdpScope enum for the list of possible values. + + This field is a member of `oneof`_ ``_pdp_scope``. public_delegated_prefixs (MutableSequence[google.cloud.compute_v1.types.PublicAdvertisedPrefixPublicDelegatedPrefix]): [Output Only] The list of public delegated prefixes that exist for this public advertised prefix. @@ -71460,6 +73873,59 @@ class PublicAdvertisedPrefix(proto.Message): This field is a member of `oneof`_ ``_status``. """ + class ByoipApiVersion(proto.Enum): + r"""[Output Only] The version of BYOIP API. + + Values: + UNDEFINED_BYOIP_API_VERSION (0): + A value indicating that the enum field is not + set. + V1 (2715): + This public advertised prefix can be used to + create both regional and global public delegated + prefixes. It usually takes 4 weeks to create or + delete a public delegated prefix. The BGP status + cannot be changed. + V2 (2716): + This public advertised prefix can only be + used to create regional public delegated + prefixes. Public delegated prefix creation and + deletion takes minutes and the BGP status can be + modified. + """ + UNDEFINED_BYOIP_API_VERSION = 0 + V1 = 2715 + V2 = 2716 + + class PdpScope(proto.Enum): + r"""Specifies how child public delegated prefix will be scoped. It could + be one of following values: - ``REGIONAL``: The public delegated + prefix is regional only. The provisioning will take a few minutes. - + ``GLOBAL``: The public delegated prefix is global only. The + provisioning will take ~4 weeks. - ``GLOBAL_AND_REGIONAL`` [output + only]: The public delegated prefixes is BYOIP V1 legacy prefix. This + is output only value and no longer supported in BYOIP V2. + + Values: + UNDEFINED_PDP_SCOPE (0): + A value indicating that the enum field is not + set. + GLOBAL (494663587): + The public delegated prefix is global only. + The provisioning will take ~4 weeks. + GLOBAL_AND_REGIONAL (318053059): + The public delegated prefixes is BYOIP V1 + legacy prefix. This is output only value and no + longer supported in BYOIP V2. + REGIONAL (92288543): + The public delegated prefix is regional only. + The provisioning will take a few minutes. + """ + UNDEFINED_PDP_SCOPE = 0 + GLOBAL = 494663587 + GLOBAL_AND_REGIONAL = 318053059 + REGIONAL = 92288543 + class Status(proto.Enum): r"""The status of the public advertised prefix. Possible values include: - ``INITIAL``: RPKI validation is complete. - ``PTR_CONFIGURED``: @@ -71474,6 +73940,8 @@ class Status(proto.Enum): UNDEFINED_STATUS (0): A value indicating that the enum field is not set. + ANNOUNCED_TO_INTERNET (177880897): + The prefix is announced to Internet. INITIAL (518841124): RPKI validation is complete. 
PREFIX_CONFIGURATION_COMPLETE (480889551): @@ -71484,20 +73952,30 @@ class Status(proto.Enum): The prefix is being removed. PTR_CONFIGURED (513497167): User has configured the PTR. + READY_TO_ANNOUNCE (64641265): + The prefix is currently withdrawn but ready + to be announced. REVERSE_DNS_LOOKUP_FAILED (295755183): Reverse DNS lookup failed. VALIDATED (66197998): Reverse DNS lookup is successful. """ UNDEFINED_STATUS = 0 + ANNOUNCED_TO_INTERNET = 177880897 INITIAL = 518841124 PREFIX_CONFIGURATION_COMPLETE = 480889551 PREFIX_CONFIGURATION_IN_PROGRESS = 378550961 PREFIX_REMOVAL_IN_PROGRESS = 284375783 PTR_CONFIGURED = 513497167 + READY_TO_ANNOUNCE = 64641265 REVERSE_DNS_LOOKUP_FAILED = 295755183 VALIDATED = 66197998 + byoip_api_version: str = proto.Field( + proto.STRING, + number=162683283, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -71538,6 +74016,11 @@ class Status(proto.Enum): number=3373707, optional=True, ) + pdp_scope: str = proto.Field( + proto.STRING, + number=524264785, + optional=True, + ) public_delegated_prefixs: MutableSequence[ "PublicAdvertisedPrefixPublicDelegatedPrefix" ] = proto.RepeatedField( @@ -71713,6 +74196,11 @@ class PublicDelegatedPrefix(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + byoip_api_version (str): + [Output Only] The version of BYOIP API. Check the + ByoipApiVersion enum for the list of possible values. + + This field is a member of `oneof`_ ``_byoip_api_version``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -71800,6 +74288,27 @@ class PublicDelegatedPrefix(proto.Message): This field is a member of `oneof`_ ``_status``. """ + class ByoipApiVersion(proto.Enum): + r"""[Output Only] The version of BYOIP API. + + Values: + UNDEFINED_BYOIP_API_VERSION (0): + A value indicating that the enum field is not + set. + V1 (2715): + This public delegated prefix usually takes 4 + weeks to delete, and the BGP status cannot be + changed. Announce and Withdraw APIs can not be + used on this prefix. + V2 (2716): + This public delegated prefix takes minutes to + delete. Announce and Withdraw APIs can be used + on this prefix to change the BGP status. + """ + UNDEFINED_BYOIP_API_VERSION = 0 + V1 = 2715 + V2 = 2716 + class Status(proto.Enum): r"""[Output Only] The status of the public delegated prefix, which can be one of following values: - ``INITIALIZING`` The public delegated @@ -71815,6 +74324,12 @@ class Status(proto.Enum): set. ANNOUNCED (365103355): The public delegated prefix is active. + ANNOUNCED_TO_GOOGLE (454875705): + The prefix is announced within Google + network. + ANNOUNCED_TO_INTERNET (177880897): + The prefix is announced to Internet and + within Google. DELETING (528602024): The public delegated prefix is being deprovsioned. 
@@ -71827,10 +74342,17 @@ class Status(proto.Enum): """ UNDEFINED_STATUS = 0 ANNOUNCED = 365103355 + ANNOUNCED_TO_GOOGLE = 454875705 + ANNOUNCED_TO_INTERNET = 177880897 DELETING = 528602024 INITIALIZING = 306588749 READY_TO_ANNOUNCE = 64641265 + byoip_api_version: str = proto.Field( + proto.STRING, + number=162683283, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -74212,6 +76734,36 @@ def raw_page(self): ) +class RegionNetworkEndpointGroupsAttachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be attached. + """ + + network_endpoints: MutableSequence["NetworkEndpoint"] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message="NetworkEndpoint", + ) + + +class RegionNetworkEndpointGroupsDetachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be detached. + """ + + network_endpoints: MutableSequence["NetworkEndpoint"] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message="NetworkEndpoint", + ) + + class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse(proto.Message): r""" @@ -75183,6 +77735,46 @@ class RemoveRuleRegionNetworkFirewallPolicyRequest(proto.Message): ) +class RemoveRuleRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.RemoveRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to remove from the + security policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + security_policy (str): + Name of the security policy to update. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + class RemoveRuleSecurityPolicyRequest(proto.Message): r"""A request message for SecurityPolicies.RemoveRule. See the method description for details. @@ -77155,6 +79747,9 @@ class ResourceStatus(proto.Message): running. This field is a member of `oneof`_ ``_physical_host``. + upcoming_maintenance (google.cloud.compute_v1.types.UpcomingMaintenance): + + This field is a member of `oneof`_ ``_upcoming_maintenance``. """ physical_host: str = proto.Field( @@ -77162,6 +79757,12 @@ class ResourceStatus(proto.Message): number=464370704, optional=True, ) + upcoming_maintenance: "UpcomingMaintenance" = proto.Field( + proto.MESSAGE, + number=227348592, + optional=True, + message="UpcomingMaintenance", + ) class ResumeInstanceRequest(proto.Message): @@ -77241,7 +79842,10 @@ class Route(proto.Message): dest_range (str): The destination range of outgoing packets that this route applies to. Both IPv4 and IPv6 - are supported. + are supported. Must specify an IPv4 range (e.g. + 192.0.2.0/24) or an IPv6 range in RFC 4291 + format (e.g. 2001:db8::/32). IPv6 range will be + displayed using RFC 5952 compressed format. This field is a member of `oneof`_ ``_dest_range``. 
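The two region NEG request messages added above mirror their zonal counterparts, so a hedged sketch of building an attach request looks like the following. NetworkEndpoint and its ip_address/port fields are pre-existing, and the attach_network_endpoints/detach_network_endpoints client methods named in the comment are assumptions about what the regenerated RegionNetworkEndpointGroupsClient exposes.

    from google.cloud import compute_v1

    # Sketch: body for attaching one endpoint to a regional NEG.
    attach_body = compute_v1.RegionNetworkEndpointGroupsAttachEndpointsRequest(
        network_endpoints=[
            # ip_address/port are pre-existing NetworkEndpoint fields; values are placeholders.
            compute_v1.NetworkEndpoint(ip_address="10.0.0.5", port=8080),
        ]
    )
    # The regenerated RegionNetworkEndpointGroupsClient is expected to expose
    # attach_network_endpoints()/detach_network_endpoints() methods (assumption)
    # that take this body together with project, region and the NEG name.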
id (int): @@ -77304,8 +79908,15 @@ class Route(proto.Message): This field is a member of `oneof`_ ``_next_hop_instance``. next_hop_ip (str): The network IP address of an instance that - should handle matching packets. Only IPv4 is - supported. + should handle matching packets. Both IPv6 + address and IPv4 addresses are supported. Must + specify an IPv4 address in dot-decimal notation + (e.g. 192.0.2.99) or an IPv6 address in RFC 4291 + format (e.g. 2001:db8::2d9:51:0:0 or + 2001:db8:0:0:2d9:51:0:0). IPv6 addresses will be + displayed using RFC 5952 compressed format (e.g. + 2001:db8::2d9:51:0:0). Should never be an + IPv4-mapped IPv6 address. This field is a member of `oneof`_ ``_next_hop_ip``. next_hop_network (str): @@ -77702,10 +80313,14 @@ class Router(proto.Message): This field is a member of `oneof`_ ``_id``. interfaces (MutableSequence[google.cloud.compute_v1.types.RouterInterface]): - Router interfaces. Each interface requires - either one linked resource, (for example, - linkedVpnTunnel), or IP address and IP address - range (for example, ipRange), or both. + Router interfaces. To create a BGP peer that + uses a router interface, the interface must have + one of the following fields specified: - + linkedVpnTunnel - linkedInterconnectAttachment - + subnetwork You can create a router interface + without any of these fields specified. However, + you cannot create a BGP peer that uses that + interface. kind (str): [Output Only] Type of resource. Always compute#router for routers. @@ -78500,15 +81115,15 @@ class RouterInterface(proto.Message): must be in the same region as the router. Each interface can have one linked resource, which can be a VPN tunnel, an Interconnect attachment, - or a virtual machine instance. + or a subnetwork. This field is a member of `oneof`_ ``_linked_interconnect_attachment``. linked_vpn_tunnel (str): URI of the linked VPN tunnel, which must be in the same region as the router. Each interface can have one linked resource, which can be a VPN - tunnel, an Interconnect attachment, or a virtual - machine instance. + tunnel, an Interconnect attachment, or a + subnetwork. This field is a member of `oneof`_ ``_linked_vpn_tunnel``. management_type (str): @@ -78731,7 +81346,7 @@ class RouterMd5AuthenticationKey(proto.Message): This field is a member of `oneof`_ ``_key``. name (str): Name used to identify the key. Must be unique - within a router. Must be referenced by at least + within a router. Must be referenced by exactly one bgpPeer. Must comply with RFC1035. This field is a member of `oneof`_ ``_name``. @@ -78763,10 +81378,11 @@ class RouterNat(proto.Message): Attributes: auto_network_tier (str): The network tier to use when automatically - reserving IP addresses. Must be one of: PREMIUM, - STANDARD. If not specified, PREMIUM tier will be - used. Check the AutoNetworkTier enum for the - list of possible values. + reserving NAT IP addresses. Must be one of: + PREMIUM, STANDARD. If not specified, then the + current project-level default tier is used. + Check the AutoNetworkTier enum for the list of + possible values. This field is a member of `oneof`_ ``_auto_network_tier``. drain_nat_ips (MutableSequence[str]): @@ -78888,6 +81504,13 @@ class RouterNat(proto.Message): connections. Defaults to 30s if not set. This field is a member of `oneof`_ ``_tcp_transitory_idle_timeout_sec``. + type_ (str): + Indicates whether this NAT is used for public + or private IP translation. If unspecified, it + defaults to PUBLIC. Check the Type enum for the + list of possible values. 
+ + This field is a member of `oneof`_ ``_type``. udp_idle_timeout_sec (int): Timeout (in seconds) for UDP connections. Defaults to 30s if not set. @@ -78896,9 +81519,9 @@ class RouterNat(proto.Message): """ class AutoNetworkTier(proto.Enum): - r"""The network tier to use when automatically reserving IP + r"""The network tier to use when automatically reserving NAT IP addresses. Must be one of: PREMIUM, STANDARD. If not specified, - PREMIUM tier will be used. + then the current project-level default tier is used. Values: UNDEFINED_AUTO_NETWORK_TIER (0): @@ -78929,6 +81552,11 @@ class EndpointTypes(proto.Enum): UNDEFINED_ENDPOINT_TYPES (0): A value indicating that the enum field is not set. + ENDPOINT_TYPE_MANAGED_PROXY_LB (439196930): + This is used for regional Application Load + Balancers (internal and external) and regional + proxy Network Load Balancers (internal and + external) endpoints. ENDPOINT_TYPE_SWG (159344456): This is used for Secure Web Gateway endpoints. @@ -78936,6 +81564,7 @@ class EndpointTypes(proto.Enum): This is the default. """ UNDEFINED_ENDPOINT_TYPES = 0 + ENDPOINT_TYPE_MANAGED_PROXY_LB = 439196930 ENDPOINT_TYPE_SWG = 159344456 ENDPOINT_TYPE_VM = 57095474 @@ -78995,6 +81624,24 @@ class SourceSubnetworkIpRangesToNat(proto.Enum): ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES = 185573819 LIST_OF_SUBNETWORKS = 517542270 + class Type(proto.Enum): + r"""Indicates whether this NAT is used for public or private IP + translation. If unspecified, it defaults to PUBLIC. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + PRIVATE (403485027): + NAT used for private IP translation. + PUBLIC (223389289): + NAT used for public IP translation. This is + the default. + """ + UNDEFINED_TYPE = 0 + PRIVATE = 403485027 + PUBLIC = 223389289 + auto_network_tier: str = proto.Field( proto.STRING, number=269770211, @@ -79083,6 +81730,11 @@ class SourceSubnetworkIpRangesToNat(proto.Enum): number=205028774, optional=True, ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) udp_idle_timeout_sec: int = proto.Field( proto.INT32, number=64919878, @@ -79176,7 +81828,7 @@ class RouterNatRule(proto.Message): '1.1.0.1' \|\| destination.ip == '8.8.8.8'" The following example is a valid match expression for private NAT: "nexthop.hub == - 'https://networkconnectivity.googleapis.com/v1alpha1/projects/my-project/global/hub/hub-1'". + '//networkconnectivity.googleapis.com/projects/my-project/locations/global/hubs/hub-1'". This field is a member of `oneof`_ ``_match``. rule_number (int): @@ -79220,22 +81872,41 @@ class RouterNatRuleAction(proto.Message): this NAT rule. These IP addresses must be valid static external IP addresses assigned to the project. This field is used for public NAT. + source_nat_active_ranges (MutableSequence[str]): + A list of URLs of the subnetworks used as source ranges for + this NAT Rule. These subnetworks must have purpose set to + PRIVATE_NAT. This field is used for private NAT. source_nat_drain_ips (MutableSequence[str]): A list of URLs of the IP resources to be drained. These IPs must be valid static external IPs that have been assigned to the NAT. These IPs should be used for updating/patching a NAT rule only. This field is used for public NAT. + source_nat_drain_ranges (MutableSequence[str]): + A list of URLs of subnetworks representing + source ranges to be drained. This is only + supported on patch/update, and these subnetworks + must have previously been used as active ranges + in this NAT Rule. 
This field is used for private + NAT. """ source_nat_active_ips: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=210378229, ) + source_nat_active_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=190556269, + ) source_nat_drain_ips: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=340812451, ) + source_nat_drain_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=84802815, + ) class RouterNatSubnetworkToNat(proto.Message): @@ -81061,6 +83732,15 @@ class SecurityPolicy(proto.Message): possible values. This field is a member of `oneof`_ ``_type``. + user_defined_fields (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyUserDefinedField]): + Definitions of user-defined fields for CLOUD_ARMOR_NETWORK + policies. A user-defined field consists of up to 4 bytes + extracted from a fixed offset in the packet, relative to the + IPv4, IPv6, TCP, or UDP header, with an optional mask to + select certain bits. Rules may then specify matching values + for these fields. Example: userDefinedFields: - name: + "ipv4_fragment_offset" base: IPV4 offset: 6 size: 2 mask: + "0x1fff". """ class Type(proto.Enum): @@ -81183,6 +83863,13 @@ class Type(proto.Enum): number=3575610, optional=True, ) + user_defined_fields: MutableSequence[ + "SecurityPolicyUserDefinedField" + ] = proto.RepeatedField( + proto.MESSAGE, + number=28312739, + message="SecurityPolicyUserDefinedField", + ) class SecurityPolicyAdaptiveProtectionConfig(proto.Message): @@ -81230,6 +83917,9 @@ class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(proto.Messag possible values. This field is a member of `oneof`_ ``_rule_visibility``. + threshold_configs (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig]): + Configuration options for layer7 adaptive + protection for various customizable thresholds. """ class RuleVisibility(proto.Enum): @@ -81260,6 +83950,68 @@ class RuleVisibility(proto.Enum): number=453258293, optional=True, ) + threshold_configs: MutableSequence[ + "SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=60347805, + message="SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig", + ) + + +class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig( + proto.Message +): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_deploy_confidence_threshold (float): + + This field is a member of `oneof`_ ``_auto_deploy_confidence_threshold``. + auto_deploy_expiration_sec (int): + + This field is a member of `oneof`_ ``_auto_deploy_expiration_sec``. + auto_deploy_impacted_baseline_threshold (float): + + This field is a member of `oneof`_ ``_auto_deploy_impacted_baseline_threshold``. + auto_deploy_load_threshold (float): + + This field is a member of `oneof`_ ``_auto_deploy_load_threshold``. + name (str): + The name must be 1-63 characters long, and + comply with RFC1035. The name must be unique + within the security policy. + + This field is a member of `oneof`_ ``_name``. 
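Assuming only the field names shown above, a short sketch of how the new per-policy threshold_configs might be populated. The config name and numeric thresholds are illustrative placeholders; rule_visibility is a pre-existing field of the Layer 7 DDoS defense config.

    from google.cloud import compute_v1

    # Sketch: granular auto-deploy thresholds for Layer 7 DDoS defense.
    layer7_defense = compute_v1.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(
        rule_visibility="STANDARD",  # pre-existing field
        threshold_configs=[
            compute_v1.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig(
                name="adaptive-thresholds",  # placeholder, RFC1035-compliant
                auto_deploy_load_threshold=0.7,
                auto_deploy_confidence_threshold=0.8,
                auto_deploy_impacted_baseline_threshold=0.05,
                auto_deploy_expiration_sec=3600,
            )
        ],
    )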
+ """ + + auto_deploy_confidence_threshold: float = proto.Field( + proto.FLOAT, + number=84309694, + optional=True, + ) + auto_deploy_expiration_sec: int = proto.Field( + proto.INT32, + number=69638793, + optional=True, + ) + auto_deploy_impacted_baseline_threshold: float = proto.Field( + proto.FLOAT, + number=292441667, + optional=True, + ) + auto_deploy_load_threshold: float = proto.Field( + proto.FLOAT, + number=522227738, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) class SecurityPolicyAdvancedOptionsConfig(proto.Message): @@ -81283,6 +84035,10 @@ class SecurityPolicyAdvancedOptionsConfig(proto.Message): possible values. This field is a member of `oneof`_ ``_log_level``. + user_ip_request_headers (MutableSequence[str]): + An optional list of case-insensitive request + header names to use for resolving the callers + client IP address. """ class JsonParsing(proto.Enum): @@ -81296,10 +84052,13 @@ class JsonParsing(proto.Enum): No description available. STANDARD (484642493): No description available. + STANDARD_WITH_GRAPHQL (106979218): + No description available. """ UNDEFINED_JSON_PARSING = 0 DISABLED = 516696700 STANDARD = 484642493 + STANDARD_WITH_GRAPHQL = 106979218 class LogLevel(proto.Enum): r""" @@ -81335,6 +84094,10 @@ class LogLevel(proto.Enum): number=140582601, optional=True, ) + user_ip_request_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=421050290, + ) class SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(proto.Message): @@ -81553,6 +84316,36 @@ class SecurityPolicyRule(proto.Message): corresponding 'action' is enforced. This field is a member of `oneof`_ ``_match``. + network_match (google.cloud.compute_v1.types.SecurityPolicyRuleNetworkMatcher): + A match condition that incoming packets are evaluated + against for CLOUD_ARMOR_NETWORK security policies. If it + matches, the corresponding 'action' is enforced. The match + criteria for a rule consists of built-in match fields (like + 'srcIpRanges') and potentially multiple user-defined match + fields ('userDefinedFields'). Field values may be extracted + directly from the packet or derived from it (e.g. + 'srcRegionCodes'). Some fields may not be present in every + packet (e.g. 'srcPorts'). A user-defined field is only + present if the base header is found in the packet and the + entire field is in bounds. Each match field may specify + which values can match it, listing one or more ranges, + prefixes, or exact values that are considered a match for + the field. A field value must be present in order to match a + specified match field. If no match values are specified for + a match field, then any field value is considered to match + it, and it's not required to be present. For strings + specifying '*' is also equivalent to match all. For a packet + to match a rule, all specified match fields must match the + corresponding field values derived from the packet. Example: + networkMatch: srcIpRanges: - "192.0.2.0/24" - + "198.51.100.0/24" userDefinedFields: - name: + "ipv4_fragment_offset" values: - "1-0x1fff" The above match + condition matches packets with a source IP in 192.0.2.0/24 + or 198.51.100.0/24 and a user-defined field named + "ipv4_fragment_offset" with a value between 1 and 0x1fff + inclusive. + + This field is a member of `oneof`_ ``_network_match``. preconfigured_waf_config (google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfig): Preconfigured WAF configuration to be applied for the rule. 
If the rule does not evaluate @@ -81615,6 +84408,12 @@ class SecurityPolicyRule(proto.Message): optional=True, message="SecurityPolicyRuleMatcher", ) + network_match: "SecurityPolicyRuleNetworkMatcher" = proto.Field( + proto.MESSAGE, + number=463387764, + optional=True, + message="SecurityPolicyRuleNetworkMatcher", + ) preconfigured_waf_config: "SecurityPolicyRulePreconfiguredWafConfig" = proto.Field( proto.MESSAGE, number=117805027, @@ -81780,6 +84579,110 @@ class SecurityPolicyRuleMatcherConfig(proto.Message): ) +class SecurityPolicyRuleNetworkMatcher(proto.Message): + r"""Represents a match condition that incoming network traffic is + evaluated against. + + Attributes: + dest_ip_ranges (MutableSequence[str]): + Destination IPv4/IPv6 addresses or CIDR + prefixes, in standard text format. + dest_ports (MutableSequence[str]): + Destination port numbers for TCP/UDP/SCTP. + Each element can be a 16-bit unsigned decimal + number (e.g. "80") or range (e.g. "0-1023"). + ip_protocols (MutableSequence[str]): + IPv4 protocol / IPv6 next header (after + extension headers). Each element can be an 8-bit + unsigned decimal number (e.g. "6"), range (e.g. + "253-254"), or one of the following protocol + names: "tcp", "udp", "icmp", "esp", "ah", + "ipip", or "sctp". + src_asns (MutableSequence[int]): + BGP Autonomous System Number associated with + the source IP address. + src_ip_ranges (MutableSequence[str]): + Source IPv4/IPv6 addresses or CIDR prefixes, + in standard text format. + src_ports (MutableSequence[str]): + Source port numbers for TCP/UDP/SCTP. Each + element can be a 16-bit unsigned decimal number + (e.g. "80") or range (e.g. "0-1023"). + src_region_codes (MutableSequence[str]): + Two-letter ISO 3166-1 alpha-2 country code + associated with the source IP address. + user_defined_fields (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch]): + User-defined fields. Each element names a + defined field and lists the matching values for + that field. + """ + + dest_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337357713, + ) + dest_ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=379902005, + ) + ip_protocols: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=259213251, + ) + src_asns: MutableSequence[int] = proto.RepeatedField( + proto.UINT32, + number=117825266, + ) + src_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=432128083, + ) + src_ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=445095415, + ) + src_region_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=99086742, + ) + user_defined_fields: MutableSequence[ + "SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch" + ] = proto.RepeatedField( + proto.MESSAGE, + number=28312739, + message="SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch", + ) + + +class SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the user-defined field, as given in + the definition. + + This field is a member of `oneof`_ ``_name``. + values (MutableSequence[str]): + Matching values of the field. Each element + can be a 32-bit unsigned decimal or hexadecimal + (starting with "0x") number (e.g. "64") or range + (e.g. "0x400-0x7ff"). 
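A minimal sketch, following the docstring example above, of building a network match for a CLOUD_ARMOR_NETWORK rule out of the new matcher types. The rule's priority, action and the policy-level field definition are omitted here; the field name referenced in the match must correspond to a user-defined field declared on the policy (see SecurityPolicyUserDefinedField later in this patch).

    from google.cloud import compute_v1

    # Sketch: match packets from two source ranges whose "ipv4_fragment_offset"
    # user-defined field falls between 1 and 0x1fff, as in the docstring example.
    network_match = compute_v1.SecurityPolicyRuleNetworkMatcher(
        src_ip_ranges=["192.0.2.0/24", "198.51.100.0/24"],
        user_defined_fields=[
            compute_v1.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch(
                name="ipv4_fragment_offset",
                values=["1-0x1fff"],
            )
        ],
    )
    # Attach it to a rule; action/priority are left out of this sketch.
    rule = compute_v1.SecurityPolicyRule(network_match=network_match)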
+ """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=249928994, + ) + + class SecurityPolicyRulePreconfiguredWafConfig(proto.Message): r""" @@ -82326,6 +85229,109 @@ class Type(proto.Enum): ) +class SecurityPolicyUserDefinedField(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + base (str): + The base relative to which 'offset' is + measured. Possible values are: - IPV4: Points to + the beginning of the IPv4 header. - IPV6: Points + to the beginning of the IPv6 header. - TCP: + Points to the beginning of the TCP header, + skipping over any IPv4 options or IPv6 extension + headers. Not present for non-first fragments. - + UDP: Points to the beginning of the UDP header, + skipping over any IPv4 options or IPv6 extension + headers. Not present for non-first fragments. + required Check the Base enum for the list of + possible values. + + This field is a member of `oneof`_ ``_base``. + mask (str): + If specified, apply this mask (bitwise AND) + to the field to ignore bits before matching. + Encoded as a hexadecimal number (starting with + "0x"). The last byte of the field (in network + byte order) corresponds to the least significant + byte of the mask. + + This field is a member of `oneof`_ ``_mask``. + name (str): + The name of this field. Must be unique within + the policy. + + This field is a member of `oneof`_ ``_name``. + offset (int): + Offset of the first byte of the field (in + network byte order) relative to 'base'. + + This field is a member of `oneof`_ ``_offset``. + size (int): + Size of the field in bytes. Valid values: + 1-4. + + This field is a member of `oneof`_ ``_size``. + """ + + class Base(proto.Enum): + r"""The base relative to which 'offset' is measured. Possible + values are: - IPV4: Points to the beginning of the IPv4 header. + - IPV6: Points to the beginning of the IPv6 header. - TCP: + Points to the beginning of the TCP header, skipping over any + IPv4 options or IPv6 extension headers. Not present for + non-first fragments. - UDP: Points to the beginning of the UDP + header, skipping over any IPv4 options or IPv6 extension + headers. Not present for non-first fragments. required + + Values: + UNDEFINED_BASE (0): + A value indicating that the enum field is not + set. + IPV4 (2254341): + No description available. + IPV6 (2254343): + No description available. + TCP (82881): + No description available. + UDP (83873): + No description available. + """ + UNDEFINED_BASE = 0 + IPV4 = 2254341 + IPV6 = 2254343 + TCP = 82881 + UDP = 83873 + + base: str = proto.Field( + proto.STRING, + number=3016401, + optional=True, + ) + mask: str = proto.Field( + proto.STRING, + number=3344108, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + offset: int = proto.Field( + proto.INT32, + number=53961875, + optional=True, + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + optional=True, + ) + + class SecuritySettings(proto.Message): r"""The authentication and authorization settings for a BackendService. @@ -82334,6 +85340,13 @@ class SecuritySettings(proto.Message): .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + aws_v4_authentication (google.cloud.compute_v1.types.AWSV4Signature): + The configuration needed to generate a signature for access + to private storage buckets that support AWS's Signature + Version 4 for authentication. Allowed only for + INTERNET_IP_PORT and INTERNET_FQDN_PORT NEG backends. + + This field is a member of `oneof`_ ``_aws_v4_authentication``. client_tls_policy (str): Optional. A URL referring to a networksecurity.ClientTlsPolicy resource that describes how @@ -82362,6 +85375,12 @@ class SecuritySettings(proto.Message): mode). """ + aws_v4_authentication: "AWSV4Signature" = proto.Field( + proto.MESSAGE, + number=433993111, + optional=True, + message="AWSV4Signature", + ) client_tls_policy: str = proto.Field( proto.STRING, number=462325226, @@ -82654,7 +85673,7 @@ class ServiceAttachment(proto.Message): example, an ACCEPTED PSC endpoint will be moved to REJECTED if its project is added to the reject list. For newly created service - attachment, this boolean defaults to true. + attachment, this boolean defaults to false. This field is a member of `oneof`_ ``_reconcile_connections``. region (str): @@ -83396,6 +86415,101 @@ class SetCertificateMapTargetSslProxyRequest(proto.Message): ) +class SetCommonInstanceMetadataOperationMetadata(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + client_operation_id (str): + [Output Only] The client operation id. + + This field is a member of `oneof`_ ``_client_operation_id``. + per_location_operations (MutableMapping[str, google.cloud.compute_v1.types.SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo]): + [Output Only] Status information per location (location name + is key). Example key: zones/us-central1-a + """ + + client_operation_id: str = proto.Field( + proto.STRING, + number=297240295, + optional=True, + ) + per_location_operations: MutableMapping[ + str, "SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=408987796, + message="SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo", + ) + + +class SetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error (google.cloud.compute_v1.types.Status): + [Output Only] If state is ``ABANDONED`` or ``FAILED``, this + field is populated. + + This field is a member of `oneof`_ ``_error``. + state (str): + [Output Only] Status of the action, which can be one of the + following: ``PROPAGATING``, ``PROPAGATED``, ``ABANDONED``, + ``FAILED``, or ``DONE``. Check the State enum for the list + of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + + class State(proto.Enum): + r"""[Output Only] Status of the action, which can be one of the + following: ``PROPAGATING``, ``PROPAGATED``, ``ABANDONED``, + ``FAILED``, or ``DONE``. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ABANDONED (81797556): + Operation not tracked in this location e.g. + zone is marked as DOWN. + DONE (2104194): + Operation has completed successfully. + FAILED (455706685): + Operation is in an error state. + PROPAGATED (507550299): + Operation is confirmed to be in the location. 
+ PROPAGATING (164807046): + Operation is not yet confirmed to have been + created in the location. + UNSPECIFIED (526786327): + No description available. + """ + UNDEFINED_STATE = 0 + ABANDONED = 81797556 + DONE = 2104194 + FAILED = 455706685 + PROPAGATED = 507550299 + PROPAGATING = 164807046 + UNSPECIFIED = 526786327 + + error: "Status" = proto.Field( + proto.MESSAGE, + number=96784904, + optional=True, + message="Status", + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + class SetCommonInstanceMetadataProjectRequest(proto.Message): r"""A request message for Projects.SetCommonInstanceMetadata. See the method description for details. @@ -83735,6 +86849,34 @@ class SetEdgeSecurityPolicyBackendServiceRequest(proto.Message): ) +class SetIamPolicyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.SetIamPolicy. See the + method description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: "GlobalSetPolicyRequest" = proto.Field( + proto.MESSAGE, + number=337048498, + message="GlobalSetPolicyRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + class SetIamPolicyBackendServiceRequest(proto.Message): r"""A request message for BackendServices.SetIamPolicy. See the method description for details. @@ -86048,6 +89190,255 @@ class SetSecurityPolicyBackendServiceRequest(proto.Message): ) +class SetSecurityPolicyInstanceRequest(proto.Message): + r"""A request message for Instances.SetSecurityPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the Instance resource to which the + security policy should be set. The name should + conform to RFC1035. + instances_set_security_policy_request_resource (google.cloud.compute_v1.types.InstancesSetSecurityPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + Name of the zone scoping this request. 
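A hedged sketch of the new Instances.SetSecurityPolicy request described above. The nested InstancesSetSecurityPolicyRequest message is added elsewhere in this patch, so the security_policy field name used below is an assumption based on the underlying Compute API, as is the set_security_policy() client method mentioned in the comment; all resource names are placeholders.

    from google.cloud import compute_v1

    # Sketch: associate a security policy with an instance.
    request = compute_v1.SetSecurityPolicyInstanceRequest(
        project="my-project",
        zone="us-central1-a",
        instance="my-instance",
        instances_set_security_policy_request_resource=compute_v1.InstancesSetSecurityPolicyRequest(
            # Assumed field name; the message body is defined elsewhere in this patch.
            security_policy="projects/my-project/global/securityPolicies/my-policy",
        ),
    )
    # The regenerated InstancesClient is expected to expose set_security_policy()
    # (assumption) accepting this request and returning an Operation.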
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_security_policy_request_resource: "InstancesSetSecurityPolicyRequest" = proto.Field( + proto.MESSAGE, + number=248424586, + message="InstancesSetSecurityPolicyRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetSecurityPolicyRegionBackendServiceRequest(proto.Message): + r"""A request message for + RegionBackendServices.SetSecurityPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the security policy should be set. The name + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: "SecurityPolicyReference" = proto.Field( + proto.MESSAGE, + number=204135024, + message="SecurityPolicyReference", + ) + + +class SetSecurityPolicyTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.SetSecurityPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + target_instance (str): + Name of the TargetInstance resource to which + the security policy should be set. The name + should conform to RFC1035. + zone (str): + Name of the zone scoping this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: "SecurityPolicyReference" = proto.Field( + proto.MESSAGE, + number=204135024, + message="SecurityPolicyReference", + ) + target_instance: str = proto.Field( + proto.STRING, + number=289769347, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetSecurityPolicyTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.SetSecurityPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + target_pool (str): + Name of the TargetPool resource to which the + security policy should be set. The name should + conform to RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: "SecurityPolicyReference" = proto.Field( + proto.MESSAGE, + number=204135024, + message="SecurityPolicyReference", + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + + class SetServiceAccountInstanceRequest(proto.Message): r"""A request message for Instances.SetServiceAccount. See the method description for details. @@ -87455,6 +90846,11 @@ class Snapshot(proto.Message): snapshot to a disk. This field is a member of `oneof`_ ``_download_bytes``. + guest_os_features (MutableSequence[google.cloud.compute_v1.types.GuestOsFeature]): + [Output Only] A list of features to enable on the guest + operating system. Applicable only for bootable images. Read + Enabling guest operating system features to see a list of + available options. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. @@ -87549,6 +90945,11 @@ class Snapshot(proto.Message): protected by a customer-supplied encryption key. 
This field is a member of `oneof`_ ``_source_disk_encryption_key``. + source_disk_for_recovery_checkpoint (str): + The source disk whose recovery checkpoint + will be used to create this snapshot. + + This field is a member of `oneof`_ ``_source_disk_for_recovery_checkpoint``. source_disk_id (str): [Output Only] The ID value of the disk used to create this snapshot. This value may be used to determine whether the @@ -87716,6 +91117,11 @@ class StorageBytesStatus(proto.Enum): number=435054068, optional=True, ) + guest_os_features: MutableSequence["GuestOsFeature"] = proto.RepeatedField( + proto.MESSAGE, + number=79294545, + message="GuestOsFeature", + ) id: int = proto.Field( proto.UINT64, number=3355, @@ -87786,6 +91192,11 @@ class StorageBytesStatus(proto.Enum): optional=True, message="CustomerEncryptionKey", ) + source_disk_for_recovery_checkpoint: str = proto.Field( + proto.STRING, + number=359837950, + optional=True, + ) source_disk_id: str = proto.Field( proto.STRING, number=454190809, @@ -87895,6 +91306,110 @@ def raw_page(self): ) +class SnapshotSettings(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + storage_location (google.cloud.compute_v1.types.SnapshotSettingsStorageLocationSettings): + Policy of which storage location is going to + be resolved, and additional data that + particularizes how the policy is going to be + carried out. + + This field is a member of `oneof`_ ``_storage_location``. + """ + + storage_location: "SnapshotSettingsStorageLocationSettings" = proto.Field( + proto.MESSAGE, + number=460859641, + optional=True, + message="SnapshotSettingsStorageLocationSettings", + ) + + +class SnapshotSettingsStorageLocationSettings(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + locations (MutableMapping[str, google.cloud.compute_v1.types.SnapshotSettingsStorageLocationSettingsStorageLocationPreference]): + When the policy is SPECIFIC_LOCATIONS, snapshots will be + stored in the locations listed in this field. Keys are GCS + bucket locations. + policy (str): + The chosen location policy. + Check the Policy enum for the list of possible + values. + + This field is a member of `oneof`_ ``_policy``. + """ + + class Policy(proto.Enum): + r"""The chosen location policy. + + Values: + UNDEFINED_POLICY (0): + A value indicating that the enum field is not + set. + LOCAL_REGION (403535464): + Store snapshot in the same region as with the + originating disk. No additional parameters are + needed. + NEAREST_MULTI_REGION (212467515): + Store snapshot to the nearest multi region + GCS bucket, relative to the originating disk. No + additional parameters are needed. + SPECIFIC_LOCATIONS (280093809): + Store snapshot in the specific locations, as specified by + the user. The list of regions to store must be defined under + the ``locations`` field. + STORAGE_LOCATION_POLICY_UNSPECIFIED (250644592): + No description available. 
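A sketch of the new snapshot settings types, using only messages defined in this patch (the storage-location preference message appears just below). The location key and preference name are placeholders, and the get()/patch() methods mentioned in the comment are expectations about the regenerated SnapshotSettingsServiceClient rather than confirmed signatures.

    from google.cloud import compute_v1

    # Sketch: force project snapshots into a specific storage location.
    settings = compute_v1.SnapshotSettings(
        storage_location=compute_v1.SnapshotSettingsStorageLocationSettings(
            policy="SPECIFIC_LOCATIONS",
            locations={
                "us-central1": compute_v1.SnapshotSettingsStorageLocationSettingsStorageLocationPreference(
                    name="us-central1",  # placeholder; expected to match the map key
                ),
            },
        )
    )
    # The new SnapshotSettingsServiceClient added by this patch is expected to
    # expose get() and patch() methods (assumption) for these project-level settings.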
+ """ + UNDEFINED_POLICY = 0 + LOCAL_REGION = 403535464 + NEAREST_MULTI_REGION = 212467515 + SPECIFIC_LOCATIONS = 280093809 + STORAGE_LOCATION_POLICY_UNSPECIFIED = 250644592 + + locations: MutableMapping[ + str, "SnapshotSettingsStorageLocationSettingsStorageLocationPreference" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=413423454, + message="SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + ) + policy: str = proto.Field( + proto.STRING, + number=91071794, + optional=True, + ) + + +class SnapshotSettingsStorageLocationSettingsStorageLocationPreference(proto.Message): + r"""A structure for specifying storage locations. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the location. It should be one of the + GCS buckets. + + This field is a member of `oneof`_ ``_name``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + class SourceDiskEncryptionKey(proto.Message): r""" @@ -88153,13 +91668,16 @@ class KeyRevocationActionType(proto.Enum): class SslCertificate(proto.Message): - r"""Represents an SSL Certificate resource. Google Compute Engine has - two SSL Certificate resources: \* + r"""Represents an SSL certificate resource. Google Compute Engine has + two SSL certificate resources: \* `Global `__ \* `Regional `__ - The sslCertificates are used by: - external HTTPS load balancers - - SSL proxy load balancers The regionSslCertificates are used by - internal HTTPS load balancers. Optionally, certificate file contents + The global SSL certificates (sslCertificates) are used by: - Global + external Application Load Balancers - Classic Application Load + Balancers - Proxy Network Load Balancers (with target SSL proxies) + The regional SSL certificates (regionSslCertificates) are used by: - + Regional external Application Load Balancers - Regional internal + Application Load Balancers Optionally, certificate file contents that you upload can contain a set of up to five PEM-encoded certificates. The API call creates an object (sslCertificate) that holds this data. You can use SSL keys and certificates to secure @@ -89346,6 +92864,16 @@ class StatefulPolicyPreservedState(proto.Message): Disks created on the instances that will be preserved on instance delete, update, etc. This map is keyed with the device names of the disks. + external_i_ps (MutableMapping[str, google.cloud.compute_v1.types.StatefulPolicyPreservedStateNetworkIp]): + External network IPs assigned to the + instances that will be preserved on instance + delete, update, etc. This map is keyed with the + network interface name. + internal_i_ps (MutableMapping[str, google.cloud.compute_v1.types.StatefulPolicyPreservedStateNetworkIp]): + Internal network IPs assigned to the + instances that will be preserved on instance + delete, update, etc. This map is keyed with the + network interface name. 
""" disks: MutableMapping[ @@ -89356,6 +92884,22 @@ class StatefulPolicyPreservedState(proto.Message): number=95594102, message="StatefulPolicyPreservedStateDiskDevice", ) + external_i_ps: MutableMapping[ + str, "StatefulPolicyPreservedStateNetworkIp" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=532687245, + message="StatefulPolicyPreservedStateNetworkIp", + ) + internal_i_ps: MutableMapping[ + str, "StatefulPolicyPreservedStateNetworkIp" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=215731675, + message="StatefulPolicyPreservedStateNetworkIp", + ) class StatefulPolicyPreservedStateDiskDevice(proto.Message): @@ -89404,6 +92948,101 @@ class AutoDelete(proto.Enum): ) +class StatefulPolicyPreservedStateNetworkIp(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (str): + These stateful IPs will never be released + during autohealing, update or VM instance + recreate operations. This flag is used to + configure if the IP reservation should be + deleted after it is no longer used by the group, + e.g. when the given instance or the whole group + is deleted. Check the AutoDelete enum for the + list of possible values. + + This field is a member of `oneof`_ ``_auto_delete``. + """ + + class AutoDelete(proto.Enum): + r"""These stateful IPs will never be released during autohealing, + update or VM instance recreate operations. This flag is used to + configure if the IP reservation should be deleted after it is no + longer used by the group, e.g. when the given instance or the + whole group is deleted. + + Values: + UNDEFINED_AUTO_DELETE (0): + A value indicating that the enum field is not + set. + NEVER (74175084): + No description available. + ON_PERMANENT_INSTANCE_DELETION (95727719): + No description available. + """ + UNDEFINED_AUTO_DELETE = 0 + NEVER = 74175084 + ON_PERMANENT_INSTANCE_DELETION = 95727719 + + auto_delete: str = proto.Field( + proto.STRING, + number=464761403, + optional=True, + ) + + +class Status(proto.Message): + r"""The ``Status`` type defines a logical error model that is suitable + for different programming environments, including REST APIs and RPC + APIs. It is used by `gRPC `__. Each + ``Status`` message contains three pieces of data: error code, error + message, and error details. You can find out more about this error + model and how to work with it in the `API Design + Guide `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (int): + The status code, which should be an enum + value of google.rpc.Code. + + This field is a member of `oneof`_ ``_code``. + details (MutableSequence[google.protobuf.any_pb2.Any]): + A list of messages that carry the error + details. There is a common set of message types + for APIs to use. + message (str): + A developer-facing error message, which + should be in English. Any user-facing error + message should be localized and sent in the + google.rpc.Status.details field, or localized by + the client. + + This field is a member of `oneof`_ ``_message``. 
+ """ + + code: int = proto.Field( + proto.INT32, + number=3059181, + optional=True, + ) + details: MutableSequence[any_pb2.Any] = proto.RepeatedField( + proto.MESSAGE, + number=483979842, + message=any_pb2.Any, + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + class StopAsyncReplicationDiskRequest(proto.Message): r"""A request message for Disks.StopAsyncReplication. See the method description for details. @@ -89959,12 +93598,18 @@ class Purpose(proto.Enum): UNDEFINED_PURPOSE (0): A value indicating that the enum field is not set. + GLOBAL_MANAGED_PROXY (236463602): + Subnet reserved for Global Envoy-based Load + Balancing. INTERNAL_HTTPS_LOAD_BALANCER (248748889): Subnet reserved for Internal HTTP(S) Load Balancing. PRIVATE (403485027): Regular user created or automatically created subnet. + PRIVATE_NAT (367764517): + Subnetwork used as source range for Private + NAT Gateways. PRIVATE_RFC_1918 (254902107): Regular user created or automatically created subnet. @@ -89972,12 +93617,14 @@ class Purpose(proto.Enum): Subnetworks created for Private Service Connect in the producer network. REGIONAL_MANAGED_PROXY (153049966): - Subnetwork used for Regional - Internal/External HTTP(S) Load Balancing. + Subnetwork used for Regional Envoy-based Load + Balancing. """ UNDEFINED_PURPOSE = 0 + GLOBAL_MANAGED_PROXY = 236463602 INTERNAL_HTTPS_LOAD_BALANCER = 248748889 PRIVATE = 403485027 + PRIVATE_NAT = 367764517 PRIVATE_RFC_1918 = 254902107 PRIVATE_SERVICE_CONNECT = 48134724 REGIONAL_MANAGED_PROXY = 153049966 @@ -91192,12 +94839,15 @@ class TargetHttpProxy(proto.Message): two Target HTTP Proxy resources: \* `Global `__ \* `Regional `__ - A target HTTP proxy is a component of GCP HTTP load balancers. \* - targetHttpProxies are used by external HTTP load balancers and - Traffic Director. \* regionTargetHttpProxies are used by internal - HTTP load balancers. Forwarding rules reference a target HTTP proxy, - and the target proxy then references a URL map. For more - information, read Using Target Proxies and Forwarding rule concepts. + A target HTTP proxy is a component of Google Cloud HTTP load + balancers. \* targetHttpProxies are used by global external + Application Load Balancers, classic Application Load Balancers, + cross-region internal Application Load Balancers, and Traffic + Director. \* regionTargetHttpProxies are used by regional internal + Application Load Balancers and regional external Application Load + Balancers. Forwarding rules reference a target HTTP proxy, and the + target proxy then references a URL map. For more information, read + Using Target Proxies and Forwarding rule concepts. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -91231,12 +94881,11 @@ class TargetHttpProxy(proto.Message): after completing a response, while there is no matching traffic (in seconds). If an HTTP keep-alive is not specified, a default value - (610 seconds) will be used. For Global external - HTTP(S) load balancer, the minimum allowed value - is 5 seconds and the maximum allowed value is - 1200 seconds. For Global external HTTP(S) load - balancer (classic), this option is not available - publicly. + (610 seconds) will be used. For global external + Application Load Balancers, the minimum allowed + value is 5 seconds and the maximum allowed value + is 1200 seconds. For classic Application Load + Balancers, this option is not supported. 
This field is a member of `oneof`_ ``_http_keep_alive_timeout_sec``. id (int): @@ -91614,8 +95263,11 @@ class TargetHttpsProxy(proto.Message): `Global `__ \* `Regional `__ A target HTTPS proxy is a component of GCP HTTPS load balancers. \* - targetHttpsProxies are used by external HTTPS load balancers. \* - regionTargetHttpsProxies are used by internal HTTPS load balancers. + targetHttpProxies are used by global external Application Load + Balancers, classic Application Load Balancers, cross-region internal + Application Load Balancers, and Traffic Director. \* + regionTargetHttpProxies are used by regional internal Application + Load Balancers and regional external Application Load Balancers. Forwarding rules reference a target HTTPS proxy, and the target proxy then references a URL map. For more information, read Using Target Proxies and Forwarding rule concepts. @@ -91674,12 +95326,11 @@ class TargetHttpsProxy(proto.Message): after completing a response, while there is no matching traffic (in seconds). If an HTTP keep-alive is not specified, a default value - (610 seconds) will be used. For Global external - HTTP(S) load balancer, the minimum allowed value - is 5 seconds and the maximum allowed value is - 1200 seconds. For Global external HTTP(S) load - balancer (classic), this option is not available - publicly. + (610 seconds) will be used. For global external + Application Load Balancers, the minimum allowed + value is 5 seconds and the maximum allowed value + is 1200 seconds. For classic Application Load + Balancers, this option is not supported. This field is a member of `oneof`_ ``_http_keep_alive_timeout_sec``. id (int): @@ -92128,6 +95779,11 @@ class TargetInstance(proto.Message): the default network interface belongs to. This field is a member of `oneof`_ ``_network``. + security_policy (str): + [Output Only] The resource URL for the security policy + associated with this target instance. + + This field is a member of `oneof`_ ``_security_policy``. self_link (str): [Output Only] Server-defined URL for the resource. @@ -92196,6 +95852,11 @@ class NatPolicy(proto.Enum): number=232872494, optional=True, ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) self_link: str = proto.Field( proto.STRING, number=456214797, @@ -92482,6 +96143,11 @@ class TargetPool(proto.Message): resides. This field is a member of `oneof`_ ``_region``. + security_policy (str): + [Output Only] The resource URL for the security policy + associated with this target pool. + + This field is a member of `oneof`_ ``_security_policy``. self_link (str): [Output Only] Server-defined URL for the resource. @@ -92616,6 +96282,11 @@ class SessionAffinity(proto.Enum): number=138946292, optional=True, ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) self_link: str = proto.Field( proto.STRING, number=456214797, @@ -94113,6 +97784,62 @@ class TestFailure(proto.Message): ) +class TestIamPermissionsBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + class TestIamPermissionsDiskRequest(proto.Message): r"""A request message for Disks.TestIamPermissions. See the method description for details. @@ -94571,6 +98298,41 @@ class TestIamPermissionsPacketMirroringRequest(proto.Message): ) +class TestIamPermissionsRegionBackendServiceRequest(proto.Message): + r"""A request message for + RegionBackendServices.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + class TestIamPermissionsRegionDiskRequest(proto.Message): r"""A request message for RegionDisks.TestIamPermissions. See the method description for details. @@ -94895,6 +98657,121 @@ class Uint128(proto.Message): ) +class UpcomingMaintenance(proto.Message): + r"""Upcoming Maintenance notification information. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + can_reschedule (bool): + Indicates if the maintenance can be customer + triggered. + + This field is a member of `oneof`_ ``_can_reschedule``. + latest_window_start_time (str): + The latest time for the planned maintenance + window to start. This timestamp value is in + RFC3339 text format. + + This field is a member of `oneof`_ ``_latest_window_start_time``. + maintenance_status (str): + Check the MaintenanceStatus enum for the list + of possible values. + + This field is a member of `oneof`_ ``_maintenance_status``. + type_ (str): + Defines the type of maintenance. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + window_end_time (str): + The time by which the maintenance disruption + will be completed. This timestamp value is in + RFC3339 text format. 
+ + This field is a member of `oneof`_ ``_window_end_time``. + window_start_time (str): + The current start time of the maintenance + window. This timestamp value is in RFC3339 text + format. + + This field is a member of `oneof`_ ``_window_start_time``. + """ + + class MaintenanceStatus(proto.Enum): + r""" + + Values: + UNDEFINED_MAINTENANCE_STATUS (0): + A value indicating that the enum field is not + set. + ONGOING (473158491): + There is ongoing maintenance on this VM. + PENDING (35394935): + There is pending maintenance. + UNKNOWN (433141802): + Unknown maintenance status. Do not use this + value. + """ + UNDEFINED_MAINTENANCE_STATUS = 0 + ONGOING = 473158491 + PENDING = 35394935 + UNKNOWN = 433141802 + + class Type(proto.Enum): + r"""Defines the type of maintenance. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + SCHEDULED (478400653): + Scheduled maintenance (e.g. maintenance after + uptime guarantee is complete). + UNKNOWN_TYPE (490705455): + No type specified. Do not use this value. + UNSCHEDULED (450077204): + Unscheduled maintenance (e.g. emergency + maintenance during uptime guarantee). + """ + UNDEFINED_TYPE = 0 + SCHEDULED = 478400653 + UNKNOWN_TYPE = 490705455 + UNSCHEDULED = 450077204 + + can_reschedule: bool = proto.Field( + proto.BOOL, + number=95981977, + optional=True, + ) + latest_window_start_time: str = proto.Field( + proto.STRING, + number=128032129, + optional=True, + ) + maintenance_status: str = proto.Field( + proto.STRING, + number=81645214, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + window_end_time: str = proto.Field( + proto.STRING, + number=271816480, + optional=True, + ) + window_start_time: str = proto.Field( + proto.STRING, + number=473061433, + optional=True, + ) + + class UpdateAccessConfigInstanceRequest(proto.Message): r"""A request message for Instances.UpdateAccessConfig. See the method description for details. @@ -96397,19 +100274,22 @@ class UrlMap(proto.Message): resources: \* `Global `__ \* `Regional `__ A URL map resource is a component of certain types of cloud load - balancers and Traffic Director: \* urlMaps are used by external - HTTP(S) load balancers and Traffic Director. \* regionUrlMaps are - used by internal HTTP(S) load balancers. For a list of supported URL - map features by the load balancer type, see the Load balancing - features: Routing and traffic management table. For a list of - supported URL map features for Traffic Director, see the Traffic - Director features: Routing and traffic management table. This - resource defines mappings from hostnames and URL paths to either a - backend service or a backend bucket. To use the global urlMaps - resource, the backend service must have a loadBalancingScheme of - either EXTERNAL or INTERNAL_SELF_MANAGED. To use the regionUrlMaps - resource, the backend service must have a loadBalancingScheme of - INTERNAL_MANAGED. For more information, read URL Map Concepts. + balancers and Traffic Director: \* urlMaps are used by global + external Application Load Balancers, classic Application Load + Balancers, and cross-region internal Application Load Balancers. \* + regionUrlMaps are used by internal Application Load Balancers, + regional external Application Load Balancers and regional internal + Application Load Balancers. For a list of supported URL map features + by the load balancer type, see the Load balancing features: Routing + and traffic management table. 
For a list of supported URL map + features for Traffic Director, see the Traffic Director features: + Routing and traffic management table. This resource defines mappings + from hostnames and URL paths to either a backend service or a + backend bucket. To use the global urlMaps resource, the backend + service must have a loadBalancingScheme of either EXTERNAL or + INTERNAL_SELF_MANAGED. To use the regionUrlMaps resource, the + backend service must have a loadBalancingScheme of INTERNAL_MANAGED. + For more information, read URL Map Concepts. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -96431,8 +100311,8 @@ class UrlMap(proto.Message): defaultRouteAction cannot contain any weightedBackendServices. Only one of defaultRouteAction or defaultUrlRedirect must be - set. URL maps for Classic external HTTP(S) load - balancers only support the urlRewrite action + set. URL maps for classic Application Load + Balancers only support the urlRewrite action within defaultRouteAction. defaultRouteAction has no effect when the URL map is bound to a target gRPC proxy that has the @@ -97002,20 +100882,21 @@ class UrlMapsValidateRequest(proto.Message): Attributes: load_balancing_schemes (MutableSequence[str]): Specifies the load balancer type(s) this validation request - is for. Use EXTERNAL_MANAGED for HTTP/HTTPS External Global - Load Balancer with Advanced Traffic Management. Use EXTERNAL - for Classic HTTP/HTTPS External Global Load Balancer. Other - load balancer types are not supported. For more information, - refer to Choosing a load balancer. If unspecified, the load - balancing scheme will be inferred from the backend service - resources this URL map references. If that can not be - inferred (for example, this URL map only references backend - buckets, or this Url map is for rewrites and redirects only - and doesn't reference any backends), EXTERNAL will be used - as the default type. If specified, the scheme(s) must not - conflict with the load balancing scheme of the backend - service resources this Url map references. Check the - LoadBalancingSchemes enum for the list of possible values. + is for. Use EXTERNAL_MANAGED for global external Application + Load Balancers and regional external Application Load + Balancers. Use EXTERNAL for classic Application Load + Balancers. Use INTERNAL_MANAGED for internal Application + Load Balancers. For more information, refer to Choosing a + load balancer. If unspecified, the load balancing scheme + will be inferred from the backend service resources this URL + map references. If that can not be inferred (for example, + this URL map only references backend buckets, or this Url + map is for rewrites and redirects only and doesn't reference + any backends), EXTERNAL will be used as the default type. If + specified, the scheme(s) must not conflict with the load + balancing scheme of the backend service resources this Url + map references. Check the LoadBalancingSchemes enum for the + list of possible values. resource (google.cloud.compute_v1.types.UrlMap): Content of the UrlMap to be validated. @@ -97030,17 +100911,18 @@ class LoadBalancingSchemes(proto.Enum): A value indicating that the enum field is not set. EXTERNAL (35607499): - Signifies that this will be used for Classic - L7 External Load Balancing. + Signifies that this will be used for classic + Application Load Balancers. EXTERNAL_MANAGED (512006923): Signifies that this will be used for - Envoy-based L7 External Load Balancing. 
+ Envoy-based global external Application Load + Balancers. LOAD_BALANCING_SCHEME_UNSPECIFIED (526507452): If unspecified, the validation will try to infer the scheme from the backend service resources this Url map references. If the - inferrence is not possible, EXTERNAL will be - used as the default type. + inference is not possible, EXTERNAL will be used + as the default type. """ UNDEFINED_LOAD_BALANCING_SCHEMES = 0 EXTERNAL = 35607499 @@ -97266,12 +101148,18 @@ class Purpose(proto.Enum): UNDEFINED_PURPOSE (0): A value indicating that the enum field is not set. + GLOBAL_MANAGED_PROXY (236463602): + Subnet reserved for Global Envoy-based Load + Balancing. INTERNAL_HTTPS_LOAD_BALANCER (248748889): Subnet reserved for Internal HTTP(S) Load Balancing. PRIVATE (403485027): Regular user created or automatically created subnet. + PRIVATE_NAT (367764517): + Subnetwork used as source range for Private + NAT Gateways. PRIVATE_RFC_1918 (254902107): Regular user created or automatically created subnet. @@ -97279,12 +101167,14 @@ class Purpose(proto.Enum): Subnetworks created for Private Service Connect in the producer network. REGIONAL_MANAGED_PROXY (153049966): - Subnetwork used for Regional - Internal/External HTTP(S) Load Balancing. + Subnetwork used for Regional Envoy-based Load + Balancing. """ UNDEFINED_PURPOSE = 0 + GLOBAL_MANAGED_PROXY = 236463602 INTERNAL_HTTPS_LOAD_BALANCER = 248748889 PRIVATE = 403485027 + PRIVATE_NAT = 367764517 PRIVATE_RFC_1918 = 254902107 PRIVATE_SERVICE_CONNECT = 48134724 REGIONAL_MANAGED_PROXY = 153049966 @@ -98567,15 +102457,15 @@ class VpnTunnel(proto.Message): This field is a member of `oneof`_ ``_peer_external_gateway_interface``. peer_gcp_gateway (str): - URL of the peer side HA GCP VPN gateway to - which this VPN tunnel is connected. Provided by - the client when the VPN tunnel is created. This + URL of the peer side HA VPN gateway to which + this VPN tunnel is connected. Provided by the + client when the VPN tunnel is created. This field can be used when creating highly available VPN from VPC network to VPC network, the field is exclusive with the field peerExternalGateway. If provided, the VPN tunnel will automatically use the same vpnGatewayInterface ID in the peer - GCP VPN gateway. + Google Cloud VPN gateway. This field is a member of `oneof`_ ``_peer_gcp_gateway``. peer_ip (str): @@ -99599,6 +103489,110 @@ class WeightedBackendService(proto.Message): ) +class WithdrawPublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Withdraw. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix (str): + The name of the public advertised prefix. It + should comply with RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix: str = proto.Field( + proto.STRING, + number=101874590, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class WithdrawPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Withdraw. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + The name of the public delegated prefix. It + should comply with RFC1035. + region (str): + The name of the region where the public + delegated prefix is located. It should comply + with RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class XpnHostList(proto.Message): r""" diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_get_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_get_iam_policy_sync.py new file mode 100644 index 000000000000..5b18912a731b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_GetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_set_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_set_iam_policy_sync.py new file mode 100644 index 000000000000..602dace4555f --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_SetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_test_iam_permissions_sync.py new file mode 100644 index 000000000000..0f744d9577ad --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsBackendBucketRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_list_usable_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_list_usable_sync.py new file mode 100644 index 000000000000..4b6705b64612 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_list_usable_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_ListUsable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_usable(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_BackendServices_ListUsable_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_test_iam_permissions_sync.py new file mode 100644 index 000000000000..1698adc17f24 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_services_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instances_set_security_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instances_set_security_policy_sync.py new file mode 100644 index 000000000000..d93aff18c80d --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instances_set_security_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_security_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetSecurityPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_interconnects_get_macsec_config_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_interconnects_get_macsec_config_sync.py new file mode 100644 index 000000000000..2263cdf36301 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_interconnects_get_macsec_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMacsecConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_GetMacsecConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_macsec_config(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetMacsecConfigInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get_macsec_config(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_GetMacsecConfig_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_attachments_patch_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_attachments_patch_sync.py new file mode 100644 index 000000000000..7b720b912544 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_attachments_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkAttachmentRequest( + network_attachment="network_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_Patch_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_announce_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_announce_sync.py new file mode 100644 index 000000000000..68fb70bb3a6b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_announce_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Announce +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Announce_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_announce(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Announce_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_withdraw_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_withdraw_sync.py new file mode 100644 index 000000000000..aeb1871bd9d8 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_withdraw_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Withdraw +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Withdraw_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_withdraw(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Withdraw_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_announce_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_announce_sync.py new file mode 100644 index 000000000000..c7b5574bb41b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_announce_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Announce +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Announce_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_announce(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AnnouncePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.announce(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Announce_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_withdraw_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_withdraw_sync.py new file mode 100644 index 000000000000..a8956cbc5054 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_withdraw_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Withdraw +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Withdraw_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_withdraw(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.WithdrawPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.withdraw(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Withdraw_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_list_usable_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_list_usable_sync.py new file mode 100644 index 000000000000..cc5168465774 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_list_usable_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_ListUsable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_usable(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableRegionBackendServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionBackendServices_ListUsable_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_set_security_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_set_security_policy_sync.py new file mode 100644 index 000000000000..bf2d101410c7 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_set_security_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_SetSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_security_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_SetSecurityPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_test_iam_permissions_sync.py new file mode 100644 index 000000000000..d07f9312e5bc --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_backend_services_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionBackendServiceRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py new file mode 100644 index 000000000000..b5319e1a261c --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AttachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_AttachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_AttachNetworkEndpoints_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py new file mode 100644 index 000000000000..17cf1dad0b18 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_DetachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_DetachNetworkEndpoints_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py new file mode 100644 index 000000000000..26d0a2a3ee1b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_ListNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_network_endpoints(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_ListNetworkEndpoints_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_add_rule_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_add_rule_sync.py new file mode 100644 index 000000000000..8031c1ebc91a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_add_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_AddRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_AddRule_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_get_rule_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_get_rule_sync.py new file mode 100644 index 000000000000..1355e1ec5804 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_get_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_GetRule_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_patch_rule_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_patch_rule_sync.py new file mode 100644 index 000000000000..6f6e2a166ae1 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_patch_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_PatchRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_PatchRule_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_remove_rule_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_remove_rule_sync.py new file mode 100644 index 000000000000..5d4f411adc21 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_security_policies_remove_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_RemoveRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_rule(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_RemoveRule_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_routers_get_nat_ip_info_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_routers_get_nat_ip_info_sync.py new file mode 100644 index 000000000000..aab2a1492690 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_routers_get_nat_ip_info_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNatIpInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_GetNatIpInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
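The four RegionSecurityPolicies rule samples above populate only the identifying fields (project, region, security_policy). Below is a hand-written sketch, not part of the generated patch, of how the new rule methods might be combined; the SecurityPolicyRule payload and the priority selector are assumptions modelled on the global SecurityPolicies API and may need adjustment.

from google.cloud import compute_v1


def configure_region_security_policy_rule(project: str, region: str, security_policy: str) -> None:
    client = compute_v1.RegionSecurityPoliciesClient()

    # Add a rule. The generated sample omits the rule payload; the
    # security_policy_rule_resource field below is an assumed name that
    # mirrors the global SecurityPolicies API.
    add_request = compute_v1.AddRuleRegionSecurityPolicyRequest(
        project=project,
        region=region,
        security_policy=security_policy,
        security_policy_rule_resource=compute_v1.SecurityPolicyRule(
            priority=1000,
            action="deny(403)",
            description="Block a test range",
            match=compute_v1.SecurityPolicyRuleMatcher(
                versioned_expr="SRC_IPS_V1",
                config=compute_v1.SecurityPolicyRuleMatcherConfig(
                    src_ip_ranges=["203.0.113.0/24"],
                ),
            ),
        ),
    )
    client.add_rule(request=add_request)

    # Rules are assumed to be addressed by priority for get/patch/remove.
    get_request = compute_v1.GetRuleRegionSecurityPolicyRequest(
        project=project,
        region=region,
        security_policy=security_policy,
        priority=1000,
    )
    print(client.get_rule(request=get_request))

    remove_request = compute_v1.RemoveRuleRegionSecurityPolicyRequest(
        project=project,
        region=region,
        security_policy=security_policy,
        priority=1000,
    )
    client.remove_rule(request=remove_request)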
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_nat_ip_info(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetNatIpInfoRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get_nat_ip_info(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_GetNatIpInfo_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_get_sync.py new file mode 100644 index 000000000000..0e030cc28e24 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_get_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SnapshotSettingsService_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SnapshotSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.GetSnapshotSettingRequest( + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SnapshotSettingsService_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_patch_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_patch_sync.py new file mode 100644 index 000000000000..3793de54b1bc --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshot_settings_service_patch_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SnapshotSettingsService_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.SnapshotSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.PatchSnapshotSettingRequest( + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SnapshotSettingsService_Patch_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_instances_set_security_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_instances_set_security_policy_sync.py new file mode 100644 index 000000000000..6ceb7fdadb11 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_instances_set_security_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_SetSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetInstances_SetSecurityPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_pools_set_security_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_pools_set_security_policy_sync.py new file mode 100644 index 000000000000..441f3a4196e6 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_target_pools_set_security_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_SetSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
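TargetInstances.SetSecurityPolicy (above) and TargetPools.SetSecurityPolicy (below) are new in this revision, and the generated samples leave the policy payload empty. The following is a hand-written sketch rather than generated code: the security_policy_reference_resource field is assumed by analogy with BackendServices.SetSecurityPolicy, whose metadata appears later in this file, and SecurityPolicyReference.security_policy is expected to take the policy's URL or name.

from google.cloud import compute_v1


def attach_security_policy_to_target_pool(
    project: str, region: str, target_pool: str, security_policy_url: str
) -> None:
    client = compute_v1.TargetPoolsClient()

    request = compute_v1.SetSecurityPolicyTargetPoolRequest(
        project=project,
        region=region,
        target_pool=target_pool,
        # Assumed field name, mirroring BackendServices.SetSecurityPolicy.
        security_policy_reference_resource=compute_v1.SecurityPolicyReference(
            security_policy=security_policy_url,
        ),
    )

    operation = client.set_security_policy(request=request)
    operation.result()  # wait for the regional operation to complete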
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_security_policy(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_SetSecurityPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index ebc934990a49..d0ee1e9191ab 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -1739,6 +1739,90 @@ ], "title": "compute_v1_generated_backend_buckets_delete_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_backend_buckets_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_get_iam_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2170,31 +2254,31 @@ "fullName": "google.cloud.compute_v1.BackendBucketsClient", "shortName": "BackendBucketsClient" }, - "fullName": "google.cloud.compute_v1.BackendBucketsClient.update", + "fullName": "google.cloud.compute_v1.BackendBucketsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.BackendBuckets.Update", + "fullName": "google.cloud.compute.v1.BackendBuckets.SetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.BackendBuckets", "shortName": "BackendBuckets" }, - "shortName": "Update" + "shortName": "SetIamPolicy" 
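The snippet-metadata entries around this point document the new BackendBuckets IAM methods (GetIamPolicy, SetIamPolicy, TestIamPermissions). Below is a hand-written read-modify-write sketch, not part of the generated patch, built from the flattened parameters listed in that metadata; the role and permission strings are illustrative only.

from google.cloud import compute_v1


def grant_backend_bucket_access(project: str, backend_bucket: str, member: str) -> None:
    client = compute_v1.BackendBucketsClient()

    # Read the current policy so its etag is carried over (read-modify-write).
    policy = client.get_iam_policy(project=project, resource=backend_bucket)
    policy.bindings.append(
        compute_v1.Binding(role="roles/compute.viewer", members=[member])
    )

    updated = client.set_iam_policy(
        project=project,
        resource=backend_bucket,
        global_set_policy_request_resource=compute_v1.GlobalSetPolicyRequest(
            policy=policy,
        ),
    )
    print(updated.etag)

    # Check which permissions the caller holds on the resource.
    response = client.test_iam_permissions(
        project=project,
        resource=backend_bucket,
        test_permissions_request_resource=compute_v1.TestPermissionsRequest(
            permissions=["compute.backendBuckets.get"],
        ),
    )
    print(response.permissions)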
}, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateBackendBucketRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyBackendBucketRequest" }, { "name": "project", "type": "str" }, { - "name": "backend_bucket", + "name": "resource", "type": "str" }, { - "name": "backend_bucket_resource", - "type": "google.cloud.compute_v1.types.BackendBucket" + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" }, { "name": "retry", @@ -2209,14 +2293,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Update", - "file": "compute_v1_generated_backend_buckets_update_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_backend_buckets_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendBuckets_Update_sync", + "regionTag": "compute_v1_generated_BackendBuckets_SetIamPolicy_sync", "segments": [ { "end": 52, @@ -2249,40 +2333,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_buckets_update_sync.py" + "title": "compute_v1_generated_backend_buckets_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.BackendServicesClient", - "shortName": "BackendServicesClient" + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.add_signed_url_key", + "fullName": "google.cloud.compute_v1.BackendBucketsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.AddSignedUrlKey", + "fullName": "google.cloud.compute.v1.BackendBuckets.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.BackendServices", - "shortName": "BackendServices" + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" }, - "shortName": "AddSignedUrlKey" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsBackendBucketRequest" }, { "name": "project", "type": "str" }, { - "name": "backend_service", + "name": "resource", "type": "str" }, { - "name": "signed_url_key_resource", - "type": "google.cloud.compute_v1.types.SignedUrlKey" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -2297,14 +2381,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_signed_url_key" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for AddSignedUrlKey", - "file": "compute_v1_generated_backend_services_add_signed_url_key_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_backend_buckets_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_AddSignedUrlKey_sync", + "regionTag": "compute_v1_generated_BackendBuckets_TestIamPermissions_sync", "segments": [ { 
"end": 52, @@ -2337,120 +2421,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_add_signed_url_key_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.BackendServicesClient", - "shortName": "BackendServicesClient" - }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.aggregated_list", - "method": { - "fullName": "google.cloud.compute.v1.BackendServices.AggregatedList", - "service": { - "fullName": "google.cloud.compute.v1.BackendServices", - "shortName": "BackendServices" - }, - "shortName": "AggregatedList" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListBackendServicesRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.compute_v1.services.backend_services.pagers.AggregatedListPager", - "shortName": "aggregated_list" - }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_backend_services_aggregated_list_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_AggregatedList_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_backend_services_aggregated_list_sync.py" + "title": "compute_v1_generated_backend_buckets_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.BackendServicesClient", - "shortName": "BackendServicesClient" + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.delete_signed_url_key", + "fullName": "google.cloud.compute_v1.BackendBucketsClient.update", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.DeleteSignedUrlKey", + "fullName": "google.cloud.compute.v1.BackendBuckets.Update", "service": { - "fullName": "google.cloud.compute.v1.BackendServices", - "shortName": "BackendServices" + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" }, - "shortName": "DeleteSignedUrlKey" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.UpdateBackendBucketRequest" }, { "name": "project", "type": "str" }, { - "name": "backend_service", + "name": "backend_bucket", "type": "str" }, { - "name": "key_name", - "type": "str" + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" }, { "name": "retry", @@ -2466,21 +2470,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_signed_url_key" + "shortName": "update" }, - "description": "Sample for DeleteSignedUrlKey", - "file": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py", + "description": "Sample for Update", + 
"file": "compute_v1_generated_backend_buckets_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_DeleteSignedUrlKey_sync", + "regionTag": "compute_v1_generated_BackendBuckets_Update_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2490,22 +2494,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py" + "title": "compute_v1_generated_backend_buckets_update_sync.py" }, { "canonical": true, @@ -2514,19 +2518,19 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.delete", + "fullName": "google.cloud.compute_v1.BackendServicesClient.add_signed_url_key", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.Delete", + "fullName": "google.cloud.compute.v1.BackendServices.AddSignedUrlKey", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "Delete" + "shortName": "AddSignedUrlKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteBackendServiceRequest" + "type": "google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest" }, { "name": "project", @@ -2536,6 +2540,10 @@ "name": "backend_service", "type": "str" }, + { + "name": "signed_url_key_resource", + "type": "google.cloud.compute_v1.types.SignedUrlKey" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2550,13 +2558,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "add_signed_url_key" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_backend_services_delete_sync.py", + "description": "Sample for AddSignedUrlKey", + "file": "compute_v1_generated_backend_services_add_signed_url_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_Delete_sync", + "regionTag": "compute_v1_generated_BackendServices_AddSignedUrlKey_sync", "segments": [ { "end": 52, @@ -2589,7 +2597,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_delete_sync.py" + "title": "compute_v1_generated_backend_services_add_signed_url_key_sync.py" }, { "canonical": true, @@ -2598,32 +2606,24 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.get_health", + "fullName": "google.cloud.compute_v1.BackendServicesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.GetHealth", + "fullName": "google.cloud.compute.v1.BackendServices.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "GetHealth" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetHealthBackendServiceRequest" + "type": "google.cloud.compute_v1.types.AggregatedListBackendServicesRequest" }, { "name": "project", "type": "str" 
}, - { - "name": "backend_service", - "type": "str" - }, - { - "name": "resource_group_reference_resource", - "type": "google.cloud.compute_v1.types.ResourceGroupReference" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2637,14 +2637,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", - "shortName": "get_health" + "resultType": "google.cloud.compute_v1.services.backend_services.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for GetHealth", - "file": "compute_v1_generated_backend_services_get_health_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_backend_services_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_GetHealth_sync", + "regionTag": "compute_v1_generated_BackendServices_AggregatedList_sync", "segments": [ { "end": 52, @@ -2662,22 +2662,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_get_health_sync.py" + "title": "compute_v1_generated_backend_services_aggregated_list_sync.py" }, { "canonical": true, @@ -2686,26 +2686,30 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.BackendServicesClient.delete_signed_url_key", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.GetIamPolicy", + "fullName": "google.cloud.compute.v1.BackendServices.DeleteSignedUrlKey", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "GetIamPolicy" + "shortName": "DeleteSignedUrlKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "backend_service", + "type": "str" + }, + { + "name": "key_name", "type": "str" }, { @@ -2721,22 +2725,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_signed_url_key" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_backend_services_get_iam_policy_sync.py", + "description": "Sample for DeleteSignedUrlKey", + "file": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_BackendServices_DeleteSignedUrlKey_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -2746,22 +2750,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + 
"start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_get_iam_policy_sync.py" + "title": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py" }, { "canonical": true, @@ -2770,19 +2774,19 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.get", + "fullName": "google.cloud.compute_v1.BackendServicesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.Get", + "fullName": "google.cloud.compute.v1.BackendServices.Delete", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "Get" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetBackendServiceRequest" + "type": "google.cloud.compute_v1.types.DeleteBackendServiceRequest" }, { "name": "project", @@ -2805,14 +2809,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.BackendService", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for Get", - "file": "compute_v1_generated_backend_services_get_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_backend_services_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_Get_sync", + "regionTag": "compute_v1_generated_BackendServices_Delete_sync", "segments": [ { "end": 52, @@ -2845,7 +2849,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_get_sync.py" + "title": "compute_v1_generated_backend_services_delete_sync.py" }, { "canonical": true, @@ -2854,27 +2858,31 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.insert", + "fullName": "google.cloud.compute_v1.BackendServicesClient.get_health", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.Insert", + "fullName": "google.cloud.compute.v1.BackendServices.GetHealth", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "Insert" + "shortName": "GetHealth" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertBackendServiceRequest" + "type": "google.cloud.compute_v1.types.GetHealthBackendServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" + "name": "backend_service", + "type": "str" + }, + { + "name": "resource_group_reference_resource", + "type": "google.cloud.compute_v1.types.ResourceGroupReference" }, { "name": "retry", @@ -2889,22 +2897,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", + "shortName": "get_health" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_backend_services_insert_sync.py", + "description": "Sample for GetHealth", + "file": "compute_v1_generated_backend_services_get_health_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_Insert_sync", + "regionTag": 
"compute_v1_generated_BackendServices_GetHealth_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2914,22 +2922,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_insert_sync.py" + "title": "compute_v1_generated_backend_services_get_health_sync.py" }, { "canonical": true, @@ -2938,24 +2946,28 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.list", + "fullName": "google.cloud.compute_v1.BackendServicesClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.List", + "fullName": "google.cloud.compute.v1.BackendServices.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "List" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListBackendServicesRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyBackendServiceRequest" }, { "name": "project", "type": "str" }, + { + "name": "resource", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2969,14 +2981,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.backend_services.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_backend_services_list_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_backend_services_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_List_sync", + "regionTag": "compute_v1_generated_BackendServices_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -2994,22 +3006,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_list_sync.py" + "title": "compute_v1_generated_backend_services_get_iam_policy_sync.py" }, { "canonical": true, @@ -3018,19 +3030,19 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.patch", + "fullName": "google.cloud.compute_v1.BackendServicesClient.get", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.Patch", + "fullName": "google.cloud.compute.v1.BackendServices.Get", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "Patch" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchBackendServiceRequest" + "type": "google.cloud.compute_v1.types.GetBackendServiceRequest" }, { "name": "project", @@ -3040,10 +3052,6 @@ "name": "backend_service", "type": "str" }, - { - "name": 
"backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3057,14 +3065,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.BackendService", + "shortName": "get" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_backend_services_patch_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_backend_services_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_Patch_sync", + "regionTag": "compute_v1_generated_BackendServices_Get_sync", "segments": [ { "end": 52, @@ -3097,7 +3105,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_patch_sync.py" + "title": "compute_v1_generated_backend_services_get_sync.py" }, { "canonical": true, @@ -3106,31 +3114,27 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.set_edge_security_policy", + "fullName": "google.cloud.compute_v1.BackendServicesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.SetEdgeSecurityPolicy", + "fullName": "google.cloud.compute.v1.BackendServices.Insert", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "SetEdgeSecurityPolicy" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.InsertBackendServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "backend_service", - "type": "str" - }, - { - "name": "security_policy_reference_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" }, { "name": "retry", @@ -3146,21 +3150,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_edge_security_policy" + "shortName": "insert" }, - "description": "Sample for SetEdgeSecurityPolicy", - "file": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_backend_services_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_SetEdgeSecurityPolicy_sync", + "regionTag": "compute_v1_generated_BackendServices_Insert_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3170,22 +3174,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py" + "title": "compute_v1_generated_backend_services_insert_sync.py" }, { "canonical": true, @@ -3194,32 +3198,24 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": 
"google.cloud.compute_v1.BackendServicesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.BackendServicesClient.list_usable", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.SetIamPolicy", + "fullName": "google.cloud.compute.v1.BackendServices.ListUsable", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "SetIamPolicy" + "shortName": "ListUsable" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.ListUsableBackendServicesRequest" }, { "name": "project", "type": "str" }, - { - "name": "resource", - "type": "str" - }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3233,14 +3229,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.services.backend_services.pagers.ListUsablePager", + "shortName": "list_usable" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_backend_services_set_iam_policy_sync.py", + "description": "Sample for ListUsable", + "file": "compute_v1_generated_backend_services_list_usable_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_BackendServices_ListUsable_sync", "segments": [ { "end": 52, @@ -3258,22 +3254,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_set_iam_policy_sync.py" + "title": "compute_v1_generated_backend_services_list_usable_sync.py" }, { "canonical": true, @@ -3282,32 +3278,24 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.set_security_policy", + "fullName": "google.cloud.compute_v1.BackendServicesClient.list", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.SetSecurityPolicy", + "fullName": "google.cloud.compute.v1.BackendServices.List", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "SetSecurityPolicy" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest" + "type": "google.cloud.compute_v1.types.ListBackendServicesRequest" }, { "name": "project", "type": "str" }, - { - "name": "backend_service", - "type": "str" - }, - { - "name": "security_policy_reference_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyReference" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3321,14 +3309,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_security_policy" + "resultType": "google.cloud.compute_v1.services.backend_services.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for SetSecurityPolicy", - "file": 
"compute_v1_generated_backend_services_set_security_policy_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_backend_services_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_SetSecurityPolicy_sync", + "regionTag": "compute_v1_generated_BackendServices_List_sync", "segments": [ { "end": 52, @@ -3346,22 +3334,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_set_security_policy_sync.py" + "title": "compute_v1_generated_backend_services_list_sync.py" }, { "canonical": true, @@ -3370,19 +3358,19 @@ "fullName": "google.cloud.compute_v1.BackendServicesClient", "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.BackendServicesClient.update", + "fullName": "google.cloud.compute_v1.BackendServicesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.BackendServices.Update", + "fullName": "google.cloud.compute.v1.BackendServices.Patch", "service": { "fullName": "google.cloud.compute.v1.BackendServices", "shortName": "BackendServices" }, - "shortName": "Update" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateBackendServiceRequest" + "type": "google.cloud.compute_v1.types.PatchBackendServiceRequest" }, { "name": "project", @@ -3410,13 +3398,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "shortName": "patch" }, - "description": "Sample for Update", - "file": "compute_v1_generated_backend_services_update_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_backend_services_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_BackendServices_Update_sync", + "regionTag": "compute_v1_generated_BackendServices_Patch_sync", "segments": [ { "end": 52, @@ -3449,33 +3437,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_backend_services_update_sync.py" + "title": "compute_v1_generated_backend_services_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DiskTypesClient", - "shortName": "DiskTypesClient" + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.DiskTypesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_edge_security_policy", "method": { - "fullName": "google.cloud.compute.v1.DiskTypes.AggregatedList", + "fullName": "google.cloud.compute.v1.BackendServices.SetEdgeSecurityPolicy", "service": { - "fullName": "google.cloud.compute.v1.DiskTypes", - "shortName": "DiskTypes" + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" }, - "shortName": "AggregatedList" + "shortName": "SetEdgeSecurityPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListDiskTypesRequest" + "type": "google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendServiceRequest" }, { "name": "project", "type": "str" }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": 
"google.cloud.compute_v1.types.SecurityPolicyReference" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3489,14 +3485,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.disk_types.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_edge_security_policy" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_disk_types_aggregated_list_sync.py", + "description": "Sample for SetEdgeSecurityPolicy", + "file": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_DiskTypes_AggregatedList_sync", + "regionTag": "compute_v1_generated_BackendServices_SetEdgeSecurityPolicy_sync", "segments": [ { "end": 52, @@ -3514,55 +3510,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disk_types_aggregated_list_sync.py" + "title": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DiskTypesClient", - "shortName": "DiskTypesClient" + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.DiskTypesClient.get", + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.DiskTypes.Get", + "fullName": "google.cloud.compute.v1.BackendServices.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.DiskTypes", - "shortName": "DiskTypes" + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" }, - "shortName": "Get" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetDiskTypeRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyBackendServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "resource", "type": "str" }, { - "name": "disk_type", - "type": "str" + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" }, { "name": "retry", @@ -3577,22 +3573,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.DiskType", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Get", - "file": "compute_v1_generated_disk_types_get_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_backend_services_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_DiskTypes_Get_sync", + "regionTag": "compute_v1_generated_BackendServices_SetIamPolicy_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3602,52 +3598,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { 
- "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disk_types_get_sync.py" + "title": "compute_v1_generated_backend_services_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DiskTypesClient", - "shortName": "DiskTypesClient" + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.DiskTypesClient.list", + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_security_policy", "method": { - "fullName": "google.cloud.compute.v1.DiskTypes.List", + "fullName": "google.cloud.compute.v1.BackendServices.SetSecurityPolicy", "service": { - "fullName": "google.cloud.compute.v1.DiskTypes", - "shortName": "DiskTypes" + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" }, - "shortName": "List" + "shortName": "SetSecurityPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListDiskTypesRequest" + "type": "google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "backend_service", "type": "str" }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3661,22 +3661,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.disk_types.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_disk_types_list_sync.py", + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_backend_services_set_security_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_DiskTypes_List_sync", + "regionTag": "compute_v1_generated_BackendServices_SetSecurityPolicy_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3696,49 +3696,45 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disk_types_list_sync.py" + "title": "compute_v1_generated_backend_services_set_security_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.add_resource_policies", + "fullName": "google.cloud.compute_v1.BackendServicesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.Disks.AddResourcePolicies", + "fullName": "google.cloud.compute.v1.BackendServices.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" }, - "shortName": "AddResourcePolicies" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsBackendServiceRequest" 
}, { "name": "project", "type": "str" }, { - "name": "zone", - "type": "str" - }, - { - "name": "disk", + "name": "resource", "type": "str" }, { - "name": "disks_add_resource_policies_request_resource", - "type": "google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -3753,22 +3749,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_resource_policies" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for AddResourcePolicies", - "file": "compute_v1_generated_disks_add_resource_policies_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_backend_services_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_AddResourcePolicies_sync", + "regionTag": "compute_v1_generated_BackendServices_TestIamPermissions_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3778,48 +3774,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_add_resource_policies_sync.py" + "title": "compute_v1_generated_backend_services_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.aggregated_list", + "fullName": "google.cloud.compute_v1.BackendServicesClient.update", "method": { - "fullName": "google.cloud.compute.v1.Disks.AggregatedList", + "fullName": "google.cloud.compute.v1.BackendServices.Update", "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" }, - "shortName": "AggregatedList" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListDisksRequest" + "type": "google.cloud.compute_v1.types.UpdateBackendServiceRequest" }, { "name": "project", "type": "str" }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3833,14 +3837,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.disks.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_disks_aggregated_list_sync.py", + "description": "Sample for Update", + "file": "compute_v1_generated_backend_services_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_AggregatedList_sync", + 
"regionTag": "compute_v1_generated_BackendServices_Update_sync", "segments": [ { "end": 52, @@ -3858,56 +3862,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_aggregated_list_sync.py" + "title": "compute_v1_generated_backend_services_update_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.bulk_insert", + "fullName": "google.cloud.compute_v1.DiskTypesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.Disks.BulkInsert", + "fullName": "google.cloud.compute.v1.DiskTypes.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" }, - "shortName": "BulkInsert" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.BulkInsertDiskRequest" + "type": "google.cloud.compute_v1.types.AggregatedListDiskTypesRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "bulk_insert_disk_resource_resource", - "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -3921,14 +3917,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "bulk_insert" + "resultType": "google.cloud.compute_v1.services.disk_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for BulkInsert", - "file": "compute_v1_generated_disks_bulk_insert_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_disk_types_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_BulkInsert_sync", + "regionTag": "compute_v1_generated_DiskTypes_AggregatedList_sync", "segments": [ { "end": 52, @@ -3946,43 +3942,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_bulk_insert_sync.py" + "title": "compute_v1_generated_disk_types_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.create_snapshot", + "fullName": "google.cloud.compute_v1.DiskTypesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Disks.CreateSnapshot", + "fullName": "google.cloud.compute.v1.DiskTypes.Get", "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" }, - "shortName": "CreateSnapshot" + "shortName": "Get" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CreateSnapshotDiskRequest" + "type": "google.cloud.compute_v1.types.GetDiskTypeRequest" }, { "name": "project", @@ -3993,13 +3989,9 @@ "type": "str" }, { - "name": "disk", + "name": "disk_type", "type": "str" }, - { - "name": "snapshot_resource", - "type": "google.cloud.compute_v1.types.Snapshot" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4013,14 +4005,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "create_snapshot" + "resultType": "google.cloud.compute_v1.types.DiskType", + "shortName": "get" }, - "description": "Sample for CreateSnapshot", - "file": "compute_v1_generated_disks_create_snapshot_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_disk_types_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_CreateSnapshot_sync", + "regionTag": "compute_v1_generated_DiskTypes_Get_sync", "segments": [ { "end": 53, @@ -4053,28 +4045,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_create_snapshot_sync.py" + "title": "compute_v1_generated_disk_types_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.delete", + "fullName": "google.cloud.compute_v1.DiskTypesClient.list", "method": { - "fullName": "google.cloud.compute.v1.Disks.Delete", + "fullName": "google.cloud.compute.v1.DiskTypes.List", "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteDiskRequest" + "type": "google.cloud.compute_v1.types.ListDiskTypesRequest" }, { "name": "project", @@ -4084,10 +4076,6 @@ "name": "zone", "type": "str" }, - { - "name": "disk", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4101,14 +4089,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.disk_types.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_disks_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_disk_types_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_Delete_sync", + "regionTag": "compute_v1_generated_DiskTypes_List_sync", "segments": [ { "end": 53, @@ -4126,22 +4114,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_delete_sync.py" + "title": "compute_v1_generated_disk_types_list_sync.py" }, { "canonical": true, @@ -4150,19 +4138,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.get_iam_policy", + 
"fullName": "google.cloud.compute_v1.DisksClient.add_resource_policies", "method": { - "fullName": "google.cloud.compute.v1.Disks.GetIamPolicy", + "fullName": "google.cloud.compute.v1.Disks.AddResourcePolicies", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "GetIamPolicy" + "shortName": "AddResourcePolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyDiskRequest" + "type": "google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest" }, { "name": "project", @@ -4173,9 +4161,13 @@ "type": "str" }, { - "name": "resource", + "name": "disk", "type": "str" }, + { + "name": "disks_add_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4189,14 +4181,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_resource_policies" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_disks_get_iam_policy_sync.py", + "description": "Sample for AddResourcePolicies", + "file": "compute_v1_generated_disks_add_resource_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_Disks_AddResourcePolicies_sync", "segments": [ { "end": 53, @@ -4229,7 +4221,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_get_iam_policy_sync.py" + "title": "compute_v1_generated_disks_add_resource_policies_sync.py" }, { "canonical": true, @@ -4238,32 +4230,24 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.get", + "fullName": "google.cloud.compute_v1.DisksClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.Disks.Get", + "fullName": "google.cloud.compute.v1.Disks.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "Get" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetDiskRequest" + "type": "google.cloud.compute_v1.types.AggregatedListDisksRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "disk", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4277,22 +4261,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Disk", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.disks.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_disks_get_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_disks_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_Get_sync", + "regionTag": "compute_v1_generated_Disks_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4302,22 +4286,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 
48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_get_sync.py" + "title": "compute_v1_generated_disks_aggregated_list_sync.py" }, { "canonical": true, @@ -4326,19 +4310,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.insert", + "fullName": "google.cloud.compute_v1.DisksClient.bulk_insert", "method": { - "fullName": "google.cloud.compute.v1.Disks.Insert", + "fullName": "google.cloud.compute.v1.Disks.BulkInsert", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "Insert" + "shortName": "BulkInsert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertDiskRequest" + "type": "google.cloud.compute_v1.types.BulkInsertDiskRequest" }, { "name": "project", @@ -4349,8 +4333,8 @@ "type": "str" }, { - "name": "disk_resource", - "type": "google.cloud.compute_v1.types.Disk" + "name": "bulk_insert_disk_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" }, { "name": "retry", @@ -4366,13 +4350,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "bulk_insert" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_disks_insert_sync.py", + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_disks_bulk_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_Insert_sync", + "regionTag": "compute_v1_generated_Disks_BulkInsert_sync", "segments": [ { "end": 52, @@ -4405,91 +4389,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.DisksClient", - "shortName": "DisksClient" - }, - "fullName": "google.cloud.compute_v1.DisksClient.list", - "method": { - "fullName": "google.cloud.compute.v1.Disks.List", - "service": { - "fullName": "google.cloud.compute.v1.Disks", - "shortName": "Disks" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListDisksRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "zone", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.compute_v1.services.disks.pagers.ListPager", - "shortName": "list" - }, - "description": "Sample for List", - "file": "compute_v1_generated_disks_list_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_List_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_disks_list_sync.py" + "title": "compute_v1_generated_disks_bulk_insert_sync.py" }, { "canonical": true, @@ -4498,19 +4398,19 @@ "fullName": 
"google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.remove_resource_policies", + "fullName": "google.cloud.compute_v1.DisksClient.create_snapshot", "method": { - "fullName": "google.cloud.compute.v1.Disks.RemoveResourcePolicies", + "fullName": "google.cloud.compute.v1.Disks.CreateSnapshot", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "RemoveResourcePolicies" + "shortName": "CreateSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest" + "type": "google.cloud.compute_v1.types.CreateSnapshotDiskRequest" }, { "name": "project", @@ -4525,8 +4425,8 @@ "type": "str" }, { - "name": "disks_remove_resource_policies_request_resource", - "type": "google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest" + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" }, { "name": "retry", @@ -4542,13 +4442,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_resource_policies" + "shortName": "create_snapshot" }, - "description": "Sample for RemoveResourcePolicies", - "file": "compute_v1_generated_disks_remove_resource_policies_sync.py", + "description": "Sample for CreateSnapshot", + "file": "compute_v1_generated_disks_create_snapshot_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_RemoveResourcePolicies_sync", + "regionTag": "compute_v1_generated_Disks_CreateSnapshot_sync", "segments": [ { "end": 53, @@ -4581,7 +4481,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_remove_resource_policies_sync.py" + "title": "compute_v1_generated_disks_create_snapshot_sync.py" }, { "canonical": true, @@ -4590,19 +4490,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.resize", + "fullName": "google.cloud.compute_v1.DisksClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Disks.Resize", + "fullName": "google.cloud.compute.v1.Disks.Delete", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "Resize" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ResizeDiskRequest" + "type": "google.cloud.compute_v1.types.DeleteDiskRequest" }, { "name": "project", @@ -4616,10 +4516,6 @@ "name": "disk", "type": "str" }, - { - "name": "disks_resize_request_resource", - "type": "google.cloud.compute_v1.types.DisksResizeRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4634,13 +4530,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "resize" + "shortName": "delete" }, - "description": "Sample for Resize", - "file": "compute_v1_generated_disks_resize_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_disks_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_Resize_sync", + "regionTag": "compute_v1_generated_Disks_Delete_sync", "segments": [ { "end": 53, @@ -4673,7 +4569,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_resize_sync.py" + "title": "compute_v1_generated_disks_delete_sync.py" }, { "canonical": true, @@ -4682,19 +4578,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" 
}, - "fullName": "google.cloud.compute_v1.DisksClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.DisksClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.Disks.SetIamPolicy", + "fullName": "google.cloud.compute.v1.Disks.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "SetIamPolicy" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyDiskRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyDiskRequest" }, { "name": "project", @@ -4708,10 +4604,6 @@ "name": "resource", "type": "str" }, - { - "name": "zone_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4726,13 +4618,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "shortName": "get_iam_policy" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_disks_set_iam_policy_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_disks_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_Disks_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -4765,7 +4657,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_set_iam_policy_sync.py" + "title": "compute_v1_generated_disks_get_iam_policy_sync.py" }, { "canonical": true, @@ -4774,19 +4666,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.set_labels", + "fullName": "google.cloud.compute_v1.DisksClient.get", "method": { - "fullName": "google.cloud.compute.v1.Disks.SetLabels", + "fullName": "google.cloud.compute.v1.Disks.Get", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "SetLabels" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsDiskRequest" + "type": "google.cloud.compute_v1.types.GetDiskRequest" }, { "name": "project", @@ -4797,13 +4689,9 @@ "type": "str" }, { - "name": "resource", + "name": "disk", "type": "str" }, - { - "name": "zone_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.ZoneSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4817,14 +4705,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "resultType": "google.cloud.compute_v1.types.Disk", + "shortName": "get" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_disks_set_labels_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_disks_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_SetLabels_sync", + "regionTag": "compute_v1_generated_Disks_Get_sync", "segments": [ { "end": 53, @@ -4857,7 +4745,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_set_labels_sync.py" + "title": "compute_v1_generated_disks_get_sync.py" }, { "canonical": true, @@ -4866,19 +4754,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.start_async_replication", + 
"fullName": "google.cloud.compute_v1.DisksClient.insert", "method": { - "fullName": "google.cloud.compute.v1.Disks.StartAsyncReplication", + "fullName": "google.cloud.compute.v1.Disks.Insert", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "StartAsyncReplication" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StartAsyncReplicationDiskRequest" + "type": "google.cloud.compute_v1.types.InsertDiskRequest" }, { "name": "project", @@ -4889,12 +4777,92 @@ "type": "str" }, { - "name": "disk", + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_disks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Disks.List", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListDisksRequest" + }, + { + "name": "project", "type": "str" }, { - "name": "disks_start_async_replication_request_resource", - "type": "google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest" + "name": "zone", + "type": "str" }, { "name": "retry", @@ -4909,14 +4877,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "start_async_replication" + "resultType": "google.cloud.compute_v1.services.disks.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for StartAsyncReplication", - "file": "compute_v1_generated_disks_start_async_replication_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_disks_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_StartAsyncReplication_sync", + "regionTag": "compute_v1_generated_Disks_List_sync", "segments": [ { "end": 53, @@ -4934,22 +4902,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_start_async_replication_sync.py" + "title": "compute_v1_generated_disks_list_sync.py" }, { "canonical": true, @@ -4958,19 +4926,19 @@ "fullName": 
"google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.stop_async_replication", + "fullName": "google.cloud.compute_v1.DisksClient.remove_resource_policies", "method": { - "fullName": "google.cloud.compute.v1.Disks.StopAsyncReplication", + "fullName": "google.cloud.compute.v1.Disks.RemoveResourcePolicies", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "StopAsyncReplication" + "shortName": "RemoveResourcePolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StopAsyncReplicationDiskRequest" + "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest" }, { "name": "project", @@ -4984,6 +4952,10 @@ "name": "disk", "type": "str" }, + { + "name": "disks_remove_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4998,13 +4970,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "stop_async_replication" + "shortName": "remove_resource_policies" }, - "description": "Sample for StopAsyncReplication", - "file": "compute_v1_generated_disks_stop_async_replication_sync.py", + "description": "Sample for RemoveResourcePolicies", + "file": "compute_v1_generated_disks_remove_resource_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_StopAsyncReplication_sync", + "regionTag": "compute_v1_generated_Disks_RemoveResourcePolicies_sync", "segments": [ { "end": 53, @@ -5037,7 +5009,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_stop_async_replication_sync.py" + "title": "compute_v1_generated_disks_remove_resource_policies_sync.py" }, { "canonical": true, @@ -5046,19 +5018,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.stop_group_async_replication", + "fullName": "google.cloud.compute_v1.DisksClient.resize", "method": { - "fullName": "google.cloud.compute.v1.Disks.StopGroupAsyncReplication", + "fullName": "google.cloud.compute.v1.Disks.Resize", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "StopGroupAsyncReplication" + "shortName": "Resize" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StopGroupAsyncReplicationDiskRequest" + "type": "google.cloud.compute_v1.types.ResizeDiskRequest" }, { "name": "project", @@ -5069,8 +5041,12 @@ "type": "str" }, { - "name": "disks_stop_group_async_replication_resource_resource", - "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + "name": "disk", + "type": "str" + }, + { + "name": "disks_resize_request_resource", + "type": "google.cloud.compute_v1.types.DisksResizeRequest" }, { "name": "retry", @@ -5086,21 +5062,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "stop_group_async_replication" + "shortName": "resize" }, - "description": "Sample for StopGroupAsyncReplication", - "file": "compute_v1_generated_disks_stop_group_async_replication_sync.py", + "description": "Sample for Resize", + "file": "compute_v1_generated_disks_resize_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_StopGroupAsyncReplication_sync", + "regionTag": 
"compute_v1_generated_Disks_Resize_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -5110,22 +5086,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_stop_group_async_replication_sync.py" + "title": "compute_v1_generated_disks_resize_sync.py" }, { "canonical": true, @@ -5134,19 +5110,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.DisksClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.Disks.TestIamPermissions", + "fullName": "google.cloud.compute.v1.Disks.SetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "TestIamPermissions" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsDiskRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyDiskRequest" }, { "name": "project", @@ -5161,8 +5137,8 @@ "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" }, { "name": "retry", @@ -5177,14 +5153,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_disks_test_iam_permissions_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_disks_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_Disks_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -5217,7 +5193,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_disks_test_iam_permissions_sync.py" + "title": "compute_v1_generated_disks_set_iam_policy_sync.py" }, { "canonical": true, @@ -5226,19 +5202,19 @@ "fullName": "google.cloud.compute_v1.DisksClient", "shortName": "DisksClient" }, - "fullName": "google.cloud.compute_v1.DisksClient.update", + "fullName": "google.cloud.compute_v1.DisksClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.Disks.Update", + "fullName": "google.cloud.compute.v1.Disks.SetLabels", "service": { "fullName": "google.cloud.compute.v1.Disks", "shortName": "Disks" }, - "shortName": "Update" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateDiskRequest" + "type": "google.cloud.compute_v1.types.SetLabelsDiskRequest" }, { "name": "project", @@ -5249,12 +5225,12 @@ "type": "str" }, { - "name": "disk", + "name": "resource", "type": "str" }, { - "name": "disk_resource", - "type": "google.cloud.compute_v1.types.Disk" + "name": "zone_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetLabelsRequest" }, { "name": 
"retry", @@ -5270,13 +5246,465 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "shortName": "set_labels" }, - "description": "Sample for Update", - "file": "compute_v1_generated_disks_update_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_disks_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Disks_Update_sync", + "regionTag": "compute_v1_generated_Disks_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.start_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.Disks.StartAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StartAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StartAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disks_start_async_replication_request_resource", + "type": "google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start_async_replication" + }, + "description": "Sample for StartAsyncReplication", + "file": "compute_v1_generated_disks_start_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StartAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_start_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.stop_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.Disks.StopAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StopAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_async_replication" + }, + "description": "Sample for StopAsyncReplication", + "file": "compute_v1_generated_disks_stop_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StopAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_stop_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.stop_group_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.Disks.StopGroupAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StopGroupAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopGroupAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disks_stop_group_async_replication_resource_resource", + "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_group_async_replication" + }, + "description": "Sample for StopGroupAsyncReplication", + "file": "compute_v1_generated_disks_stop_group_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StopGroupAsyncReplication_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_stop_group_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Disks.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + 
"type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_disks_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Update", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_disks_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Update_sync", "segments": [ { "end": 53, @@ -19019,6 +19447,98 @@ ], "title": "compute_v1_generated_instances_set_scheduling_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_security_policy_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetSecurityPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_instances_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetSecurityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_security_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -21414,19 +21934,19 @@ "fullName": "google.cloud.compute_v1.InterconnectsClient", "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.get", + "fullName": "google.cloud.compute_v1.InterconnectsClient.get_macsec_config", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Get", + "fullName": "google.cloud.compute.v1.Interconnects.GetMacsecConfig", "service": { "fullName": "google.cloud.compute.v1.Interconnects", "shortName": "Interconnects" }, - "shortName": "Get" + "shortName": "GetMacsecConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectRequest" + "type": "google.cloud.compute_v1.types.GetMacsecConfigInterconnectRequest" }, { "name": "project", @@ -21449,14 +21969,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Interconnect", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.InterconnectsGetMacsecConfigResponse", + "shortName": "get_macsec_config" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnects_get_sync.py", + "description": "Sample for GetMacsecConfig", + "file": "compute_v1_generated_interconnects_get_macsec_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Get_sync", + "regionTag": "compute_v1_generated_Interconnects_GetMacsecConfig_sync", "segments": [ { "end": 52, @@ -21489,171 +22009,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_get_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" - }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.insert", - "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Insert", - "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" - }, - "shortName": "Insert" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.InsertInterconnectRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "interconnect_resource", - "type": "google.cloud.compute_v1.types.Interconnect" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" - }, - "description": "Sample for Insert", - "file": 
"compute_v1_generated_interconnects_insert_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Insert_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_interconnects_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" - }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.list", - "method": { - "fullName": "google.cloud.compute.v1.Interconnects.List", - "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.compute_v1.services.interconnects.pagers.ListPager", - "shortName": "list" - }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnects_list_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_List_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_interconnects_list_sync.py" + "title": "compute_v1_generated_interconnects_get_macsec_config_sync.py" }, { "canonical": true, @@ -21662,19 +22018,19 @@ "fullName": "google.cloud.compute_v1.InterconnectsClient", "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.patch", + "fullName": "google.cloud.compute_v1.InterconnectsClient.get", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Patch", + "fullName": "google.cloud.compute.v1.Interconnects.Get", "service": { "fullName": "google.cloud.compute.v1.Interconnects", "shortName": "Interconnects" }, - "shortName": "Patch" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInterconnectRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectRequest" }, { "name": "project", @@ -21684,10 +22040,6 @@ "name": "interconnect", "type": "str" }, - { - "name": "interconnect_resource", - "type": "google.cloud.compute_v1.types.Interconnect" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -21701,14 +22053,266 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.Interconnect", + "shortName": "get" }, - "description": "Sample for Patch", - 
"file": "compute_v1_generated_interconnects_patch_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnects_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Patch_sync", + "regionTag": "compute_v1_generated_Interconnects_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnects_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.List", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnects.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnects_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Interconnects_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect", + "type": "str" + }, + { + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnects_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Patch_sync", "segments": [ { "end": 52, @@ -23962,19 +24566,19 @@ "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.SetIamPolicy", + "fullName": "google.cloud.compute.v1.NetworkAttachments.Patch", "service": { "fullName": "google.cloud.compute.v1.NetworkAttachments", "shortName": "NetworkAttachments" }, - "shortName": "SetIamPolicy" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.PatchNetworkAttachmentRequest" }, { "name": "project", @@ -23985,12 +24589,12 @@ "type": "str" }, { - "name": "resource", + "name": "network_attachment", "type": "str" }, { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + "name": "network_attachment_resource", + "type": "google.cloud.compute_v1.types.NetworkAttachment" }, { "name": "retry", @@ -24005,14 +24609,106 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_network_attachments_set_iam_policy_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_network_attachments_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NetworkAttachments_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_network_attachments_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -30893,7 +31589,503 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_get_xpn_host_sync.py" + "title": "compute_v1_generated_projects_get_xpn_host_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.get_xpn_resources", + "method": { + "fullName": "google.cloud.compute.v1.Projects.GetXpnResources", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "GetXpnResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.projects.pagers.GetXpnResourcesPager", + "shortName": "get_xpn_resources" + }, + "description": "Sample for GetXpnResources", + "file": "compute_v1_generated_projects_get_xpn_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_GetXpnResources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_get_xpn_resources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Projects.Get", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Project", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_projects_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_Get_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.list_xpn_hosts", + "method": { + "fullName": "google.cloud.compute.v1.Projects.ListXpnHosts", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "ListXpnHosts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListXpnHostsProjectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "projects_list_xpn_hosts_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsListXpnHostsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.projects.pagers.ListXpnHostsPager", + "shortName": "list_xpn_hosts" + }, + "description": "Sample for ListXpnHosts", + "file": "compute_v1_generated_projects_list_xpn_hosts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_ListXpnHosts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_projects_list_xpn_hosts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.move_disk", + "method": { + "fullName": "google.cloud.compute.v1.Projects.MoveDisk", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "MoveDisk" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveDiskProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "disk_move_request_resource", + "type": "google.cloud.compute_v1.types.DiskMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move_disk" + }, + "description": "Sample for MoveDisk", + "file": "compute_v1_generated_projects_move_disk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_MoveDisk_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_move_disk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.move_instance", + "method": { + "fullName": "google.cloud.compute.v1.Projects.MoveInstance", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveInstanceProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_move_request_resource", + "type": "google.cloud.compute_v1.types.InstanceMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "compute_v1_generated_projects_move_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_MoveInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_move_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + 
"shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.set_common_instance_metadata", + "method": { + "fullName": "google.cloud.compute.v1.Projects.SetCommonInstanceMetadata", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "SetCommonInstanceMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "metadata_resource", + "type": "google.cloud.compute_v1.types.Metadata" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_common_instance_metadata" + }, + "description": "Sample for SetCommonInstanceMetadata", + "file": "compute_v1_generated_projects_set_common_instance_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_SetCommonInstanceMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_set_common_instance_metadata_sync.py" }, { "canonical": true, @@ -30902,24 +32094,28 @@ "fullName": "google.cloud.compute_v1.ProjectsClient", "shortName": "ProjectsClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.get_xpn_resources", + "fullName": "google.cloud.compute_v1.ProjectsClient.set_default_network_tier", "method": { - "fullName": "google.cloud.compute.v1.Projects.GetXpnResources", + "fullName": "google.cloud.compute.v1.Projects.SetDefaultNetworkTier", "service": { "fullName": "google.cloud.compute.v1.Projects", "shortName": "Projects" }, - "shortName": "GetXpnResources" + "shortName": "SetDefaultNetworkTier" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest" + "type": "google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest" }, { "name": "project", "type": "str" }, + { + "name": "projects_set_default_network_tier_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30933,22 +32129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.projects.pagers.GetXpnResourcesPager", - "shortName": "get_xpn_resources" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_default_network_tier" }, - "description": "Sample for GetXpnResources", - "file": "compute_v1_generated_projects_get_xpn_resources_sync.py", + "description": "Sample for SetDefaultNetworkTier", + "file": "compute_v1_generated_projects_set_default_network_tier_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_GetXpnResources_sync", + "regionTag": "compute_v1_generated_Projects_SetDefaultNetworkTier_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - 
"end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -30968,12 +32164,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_get_xpn_resources_sync.py" + "title": "compute_v1_generated_projects_set_default_network_tier_sync.py" }, { "canonical": true, @@ -30982,24 +32178,28 @@ "fullName": "google.cloud.compute_v1.ProjectsClient", "shortName": "ProjectsClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.get", + "fullName": "google.cloud.compute_v1.ProjectsClient.set_usage_export_bucket", "method": { - "fullName": "google.cloud.compute.v1.Projects.Get", + "fullName": "google.cloud.compute.v1.Projects.SetUsageExportBucket", "service": { "fullName": "google.cloud.compute.v1.Projects", "shortName": "Projects" }, - "shortName": "Get" + "shortName": "SetUsageExportBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetProjectRequest" + "type": "google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest" }, { "name": "project", "type": "str" }, + { + "name": "usage_export_location_resource", + "type": "google.cloud.compute_v1.types.UsageExportLocation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31013,14 +32213,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Project", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_usage_export_bucket" }, - "description": "Sample for Get", - "file": "compute_v1_generated_projects_get_sync.py", + "description": "Sample for SetUsageExportBucket", + "file": "compute_v1_generated_projects_set_usage_export_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_Get_sync", + "regionTag": "compute_v1_generated_Projects_SetUsageExportBucket_sync", "segments": [ { "end": 51, @@ -31053,36 +32253,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_get_sync.py" + "title": "compute_v1_generated_projects_set_usage_export_bucket_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.list_xpn_hosts", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.announce", "method": { - "fullName": "google.cloud.compute.v1.Projects.ListXpnHosts", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Announce", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "ListXpnHosts" + "shortName": "Announce" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListXpnHostsProjectsRequest" + "type": "google.cloud.compute_v1.types.AnnouncePublicAdvertisedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "projects_list_xpn_hosts_request_resource", - "type": "google.cloud.compute_v1.types.ProjectsListXpnHostsRequest" + "name": "public_advertised_prefix", + "type": "str" }, { "name": "retry", @@ -31097,14 +32297,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.projects.pagers.ListXpnHostsPager", - "shortName": "list_xpn_hosts" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "announce" }, - "description": "Sample for ListXpnHosts", - "file": "compute_v1_generated_projects_list_xpn_hosts_sync.py", + "description": "Sample for Announce", + "file": "compute_v1_generated_public_advertised_prefixes_announce_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_ListXpnHosts_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Announce_sync", "segments": [ { "end": 52, @@ -31122,51 +32322,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_list_xpn_hosts_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_announce_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.move_disk", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Projects.MoveDisk", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Delete", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "MoveDisk" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.MoveDiskProjectRequest" + "type": "google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "disk_move_request_resource", - "type": "google.cloud.compute_v1.types.DiskMoveRequest" + "name": "public_advertised_prefix", + "type": "str" }, { "name": "retry", @@ -31182,21 +32382,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "move_disk" + "shortName": "delete" }, - "description": "Sample for MoveDisk", - "file": "compute_v1_generated_projects_move_disk_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_public_advertised_prefixes_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_MoveDisk_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Delete_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -31206,51 +32406,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_move_disk_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.move_instance", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Projects.MoveInstance", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Get", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "MoveInstance" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.MoveInstanceProjectRequest" + "type": "google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "instance_move_request_resource", - "type": "google.cloud.compute_v1.types.InstanceMoveRequest" + "name": "public_advertised_prefix", + "type": "str" }, { "name": "retry", @@ -31265,22 +32465,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "move_instance" + "resultType": "google.cloud.compute_v1.types.PublicAdvertisedPrefix", + "shortName": "get" }, - "description": "Sample for MoveInstance", - "file": "compute_v1_generated_projects_move_instance_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_public_advertised_prefixes_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_MoveInstance_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Get_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -31290,51 +32490,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_move_instance_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.set_common_instance_metadata", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.Projects.SetCommonInstanceMetadata", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Insert", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "SetCommonInstanceMetadata" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest" + "type": "google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "metadata_resource", - "type": 
"google.cloud.compute_v1.types.Metadata" + "name": "public_advertised_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" }, { "name": "retry", @@ -31350,13 +32550,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_common_instance_metadata" + "shortName": "insert" }, - "description": "Sample for SetCommonInstanceMetadata", - "file": "compute_v1_generated_projects_set_common_instance_metadata_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_public_advertised_prefixes_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_SetCommonInstanceMetadata_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Insert_sync", "segments": [ { "end": 51, @@ -31389,37 +32589,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_set_common_instance_metadata_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.set_default_network_tier", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.list", "method": { - "fullName": "google.cloud.compute.v1.Projects.SetDefaultNetworkTier", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.List", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "SetDefaultNetworkTier" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest" + "type": "google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest" }, { "name": "project", "type": "str" }, - { - "name": "projects_set_default_network_tier_request_resource", - "type": "google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31433,22 +32629,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_default_network_tier" + "resultType": "google.cloud.compute_v1.services.public_advertised_prefixes.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for SetDefaultNetworkTier", - "file": "compute_v1_generated_projects_set_default_network_tier_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_public_advertised_prefixes_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_SetDefaultNetworkTier_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_List_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -31468,41 +32664,45 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_set_default_network_tier_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.ProjectsClient", - "shortName": "ProjectsClient" + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.ProjectsClient.set_usage_export_bucket", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.Projects.SetUsageExportBucket", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Patch", "service": { - "fullName": "google.cloud.compute.v1.Projects", - "shortName": "Projects" + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "SetUsageExportBucket" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest" + "type": "google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "usage_export_location_resource", - "type": "google.cloud.compute_v1.types.UsageExportLocation" + "name": "public_advertised_prefix", + "type": "str" + }, + { + "name": "public_advertised_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" }, { "name": "retry", @@ -31518,21 +32718,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_usage_export_bucket" + "shortName": "patch" }, - "description": "Sample for SetUsageExportBucket", - "file": "compute_v1_generated_projects_set_usage_export_bucket_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_public_advertised_prefixes_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Projects_SetUsageExportBucket_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Patch_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -31542,22 +32742,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_projects_set_usage_export_bucket_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_patch_sync.py" }, { "canonical": true, @@ -31566,19 +32766,19 @@ "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", "shortName": "PublicAdvertisedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.delete", + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.withdraw", "method": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Delete", + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Withdraw", "service": { "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", "shortName": "PublicAdvertisedPrefixes" }, - "shortName": "Delete" + "shortName": "Withdraw" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest" + "type": "google.cloud.compute_v1.types.WithdrawPublicAdvertisedPrefixeRequest" }, { "name": "project", @@ -31602,13 +32802,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "withdraw" 
}, - "description": "Sample for Delete", - "file": "compute_v1_generated_public_advertised_prefixes_delete_sync.py", + "description": "Sample for Withdraw", + "file": "compute_v1_generated_public_advertised_prefixes_withdraw_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Delete_sync", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Withdraw_sync", "segments": [ { "end": 52, @@ -31641,37 +32841,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_advertised_prefixes_delete_sync.py" + "title": "compute_v1_generated_public_advertised_prefixes_withdraw_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", - "shortName": "PublicAdvertisedPrefixesClient" + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.get", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Get", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", - "shortName": "PublicAdvertisedPrefixes" + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Get" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest" + "type": "google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest" }, { "name": "project", "type": "str" }, - { - "name": "public_advertised_prefix", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31685,14 +32881,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.PublicAdvertisedPrefix", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_public_advertised_prefixes_get_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Get_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_AggregatedList_sync", "segments": [ { "end": 52, @@ -31710,51 +32906,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_advertised_prefixes_get_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", - "shortName": "PublicAdvertisedPrefixesClient" + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.insert", + 
"fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.announce", "method": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Insert", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Announce", "service": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", - "shortName": "PublicAdvertisedPrefixes" + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Insert" + "shortName": "Announce" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest" + "type": "google.cloud.compute_v1.types.AnnouncePublicDelegatedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "public_advertised_prefix_resource", - "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" }, { "name": "retry", @@ -31770,21 +32970,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "announce" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_public_advertised_prefixes_insert_sync.py", + "description": "Sample for Announce", + "file": "compute_v1_generated_public_delegated_prefixes_announce_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Insert_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Announce_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -31794,48 +32994,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_advertised_prefixes_insert_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_announce_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", - "shortName": "PublicAdvertisedPrefixesClient" + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.list", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.List", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Delete", "service": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", - "shortName": "PublicAdvertisedPrefixes" + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest" + "type": "google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31849,22 +33057,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.public_advertised_prefixes.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for List", - "file": "compute_v1_generated_public_advertised_prefixes_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_public_delegated_prefixes_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_List_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Delete_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -31874,55 +33082,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_advertised_prefixes_list_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", - "shortName": "PublicAdvertisedPrefixesClient" + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.patch", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.get", "method": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Patch", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Get", "service": { - "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", - "shortName": "PublicAdvertisedPrefixes" + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Patch" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest" + "type": "google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest" }, { "name": "project", "type": "str" }, { - "name": "public_advertised_prefix", + "name": "region", "type": "str" }, { - "name": "public_advertised_prefix_resource", - "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" + "name": "public_delegated_prefix", + "type": "str" }, { "name": "retry", @@ -31937,22 +33145,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.PublicDelegatedPrefix", + "shortName": "get" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_public_advertised_prefixes_patch_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_public_delegated_prefixes_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Patch_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -31962,22 +33170,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 
49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_advertised_prefixes_patch_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_get_sync.py" }, { "canonical": true, @@ -31986,24 +33194,32 @@ "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.AggregatedList", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Insert", "service": { "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", "shortName": "PublicDelegatedPrefixes" }, - "shortName": "AggregatedList" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest" + "type": "google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32017,14 +33233,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_public_delegated_prefixes_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_AggregatedList_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Insert_sync", "segments": [ { "end": 52, @@ -32042,22 +33258,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_insert_sync.py" }, { "canonical": true, @@ -32066,19 +33282,19 @@ "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.delete", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.list", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Delete", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.List", "service": { "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest" }, { "name": "project", 
@@ -32088,10 +33304,6 @@ "name": "region", "type": "str" }, - { - "name": "public_delegated_prefix", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32105,14 +33317,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_public_delegated_prefixes_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_public_delegated_prefixes_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Delete_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_List_sync", "segments": [ { "end": 53, @@ -32130,22 +33342,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_delete_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_list_sync.py" }, { "canonical": true, @@ -32154,19 +33366,19 @@ "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", "shortName": "PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.get", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Get", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Patch", "service": { "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Get" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest" }, { "name": "project", @@ -32180,6 +33392,10 @@ "name": "public_delegated_prefix", "type": "str" }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32193,14 +33409,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.PublicDelegatedPrefix", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for Get", - "file": "compute_v1_generated_public_delegated_prefixes_get_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_public_delegated_prefixes_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Get_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Patch_sync", "segments": [ { "end": 53, @@ -32233,179 +33449,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_get_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" - }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.insert", 
- "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Insert", - "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" - }, - "shortName": "Insert" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "public_delegated_prefix_resource", - "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" - }, - "description": "Sample for Insert", - "file": "compute_v1_generated_public_delegated_prefixes_insert_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Insert_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_public_delegated_prefixes_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" - }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.list", - "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.List", - "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager", - "shortName": "list" - }, - "description": "Sample for List", - "file": "compute_v1_generated_public_delegated_prefixes_list_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_List_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_public_delegated_prefixes_list_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_patch_sync.py" }, { "canonical": true, @@ -32414,19 +33458,19 @@ "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", "shortName": 
"PublicDelegatedPrefixesClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.patch", + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.withdraw", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Patch", + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Withdraw", "service": { "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", "shortName": "PublicDelegatedPrefixes" }, - "shortName": "Patch" + "shortName": "Withdraw" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.WithdrawPublicDelegatedPrefixeRequest" }, { "name": "project", @@ -32440,10 +33484,6 @@ "name": "public_delegated_prefix", "type": "str" }, - { - "name": "public_delegated_prefix_resource", - "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32458,13 +33498,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "withdraw" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_public_delegated_prefixes_patch_sync.py", + "description": "Sample for Withdraw", + "file": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Patch_sync", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Withdraw_sync", "segments": [ { "end": 53, @@ -32497,7 +33537,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_patch_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py" }, { "canonical": true, @@ -33201,7 +34241,95 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_health_sync.py" + "title": "compute_v1_generated_region_backend_services_get_health_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" 
+ }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py" }, { "canonical": true, @@ -33210,19 +34338,19 @@ "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", "shortName": "RegionBackendServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.GetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionBackendServices.Get", "service": { "fullName": "google.cloud.compute.v1.RegionBackendServices", "shortName": "RegionBackendServices" }, - "shortName": "GetIamPolicy" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.GetRegionBackendServiceRequest" }, { "name": "project", @@ -33233,7 +34361,7 @@ "type": "str" }, { - "name": "resource", + "name": "backend_service", "type": "str" }, { @@ -33249,14 +34377,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.types.BackendService", + "shortName": "get" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_backend_services_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionBackendServices_Get_sync", "segments": [ { "end": 53, @@ -33289,7 +34417,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_backend_services_get_sync.py" }, { "canonical": true, @@ -33298,19 +34426,19 @@ "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", "shortName": "RegionBackendServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get", + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Get", + "fullName": "google.cloud.compute.v1.RegionBackendServices.Insert", "service": { "fullName": "google.cloud.compute.v1.RegionBackendServices", "shortName": "RegionBackendServices" }, - "shortName": "Get" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.InsertRegionBackendServiceRequest" }, { "name": "project", @@ -33321,8 +34449,8 @@ "type": "str" }, { - "name": "backend_service", - "type": "str" + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" }, { "name": "retry", @@ -33337,22 +34465,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.BackendService", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for Get", - "file": 
"compute_v1_generated_region_backend_services_get_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_backend_services_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Get_sync", + "regionTag": "compute_v1_generated_RegionBackendServices_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -33362,22 +34490,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_sync.py" + "title": "compute_v1_generated_region_backend_services_insert_sync.py" }, { "canonical": true, @@ -33386,19 +34514,19 @@ "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", "shortName": "RegionBackendServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.insert", + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list_usable", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Insert", + "fullName": "google.cloud.compute.v1.RegionBackendServices.ListUsable", "service": { "fullName": "google.cloud.compute.v1.RegionBackendServices", "shortName": "RegionBackendServices" }, - "shortName": "Insert" + "shortName": "ListUsable" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.ListUsableRegionBackendServicesRequest" }, { "name": "project", @@ -33408,10 +34536,6 @@ "name": "region", "type": "str" }, - { - "name": "backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33425,22 +34549,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListUsablePager", + "shortName": "list_usable" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_backend_services_insert_sync.py", + "description": "Sample for ListUsable", + "file": "compute_v1_generated_region_backend_services_list_usable_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Insert_sync", + "regionTag": "compute_v1_generated_RegionBackendServices_ListUsable_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -33460,12 +34584,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_insert_sync.py" + "title": "compute_v1_generated_region_backend_services_list_usable_sync.py" }, { "canonical": true, @@ -33733,7 +34857,191 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_region_backend_services_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_SetSecurityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_set_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + 
{ + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py" }, { "canonical": true, @@ -39439,6 +40747,98 @@ ], "title": "compute_v1_generated_region_instances_bulk_insert_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.attach_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.AttachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "AttachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "region_network_endpoint_groups_attach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "attach_network_endpoints" + }, + "description": "Sample for AttachNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_AttachNetworkEndpoints_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py" + }, { "canonical": true, "clientMethod": { @@ -39485,14 +40885,194 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.detach_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.DetachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "DetachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "region_network_endpoint_groups_detach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "detach_network_endpoints" + }, + "description": "Sample for DetachNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_DetachNetworkEndpoints_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py", + "description": "Sample for Get", + 
"file": "compute_v1_generated_region_network_endpoint_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Get_sync", "segments": [ { "end": 53, @@ -39525,7 +41105,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_get_sync.py" }, { "canonical": true, @@ -39534,19 +41114,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.get", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Get", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Insert", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Get" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -39557,8 +41137,8 @@ "type": "str" }, { - "name": "network_endpoint_group", - "type": "str" + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" }, { "name": "retry", @@ -39573,22 +41153,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_network_endpoint_groups_get_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Get_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -39598,22 +41178,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_get_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py" }, { "canonical": true, @@ -39622,19 +41202,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.insert", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Insert", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.ListNetworkEndpoints", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", 
"shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Insert" + "shortName": "ListNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest" }, { "name": "project", @@ -39645,8 +41225,8 @@ "type": "str" }, { - "name": "network_endpoint_group_resource", - "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" + "name": "network_endpoint_group", + "type": "str" }, { "name": "retry", @@ -39661,22 +41241,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListNetworkEndpointsPager", + "shortName": "list_network_endpoints" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py", + "description": "Sample for ListNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_ListNetworkEndpoints_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -39686,22 +41266,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py" }, { "canonical": true, @@ -40321,7 +41901,183 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py" }, { "canonical": true, @@ -40330,19 +42086,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Get", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "GetIamPolicy" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40353,7 +42109,7 @@ "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, { @@ 
-40369,14 +42125,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_network_firewall_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync", "segments": [ { "end": 53, @@ -40409,7 +42165,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_sync.py" }, { "canonical": true, @@ -40418,19 +42174,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_rule", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetRule", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Insert", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "GetRule" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40441,8 +42197,8 @@ "type": "str" }, { - "name": "firewall_policy", - "type": "str" + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" }, { "name": "retry", @@ -40457,22 +42213,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", - "shortName": "get_rule" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for GetRule", - "file": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_network_firewall_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -40482,22 +42238,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_insert_sync.py" }, { "canonical": true, @@ -40506,19 +42262,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", 
"shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Get", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.List", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest" }, { "name": "project", @@ -40528,10 +42284,6 @@ "name": "region", "type": "str" }, - { - "name": "firewall_policy", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -40545,14 +42297,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicy", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_network_firewall_policies_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_network_firewall_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_List_sync", "segments": [ { "end": 53, @@ -40570,22 +42322,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_list_sync.py" }, { "canonical": true, @@ -40594,19 +42346,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Insert", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.PatchRule", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Insert" + "shortName": "PatchRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40617,8 +42369,12 @@ "type": "str" }, { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" }, { "name": "retry", @@ -40634,21 +42390,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": 
"patch_rule" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_network_firewall_policies_insert_sync.py", + "description": "Sample for PatchRule", + "file": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -40658,22 +42414,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_insert_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py" }, { "canonical": true, @@ -40682,19 +42438,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.list", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.List", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Patch", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "List" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest" + "type": "google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40704,6 +42460,14 @@ "name": "region", "type": "str" }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -40717,14 +42481,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_network_firewall_policies_list_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_network_firewall_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_List_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync", "segments": [ { "end": 53, @@ -40742,22 +42506,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_list_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_patch_sync.py" }, { "canonical": true, @@ -40766,19 +42530,19 
@@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch_rule", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_association", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.PatchRule", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveAssociation", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "PatchRule" + "shortName": "RemoveAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40792,10 +42556,6 @@ "name": "firewall_policy", "type": "str" }, - { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -40810,13 +42570,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_rule" + "shortName": "remove_association" }, - "description": "Sample for PatchRule", - "file": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py", + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync", "segments": [ { "end": 53, @@ -40849,7 +42609,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py" }, { "canonical": true, @@ -40858,19 +42618,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Patch", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveRule", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Patch" + "shortName": "RemoveRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40884,10 +42644,6 @@ "name": "firewall_policy", "type": "str" }, - { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -40902,13 +42658,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "remove_rule" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_network_firewall_policies_patch_sync.py", + 
"description": "Sample for RemoveRule", + "file": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync", "segments": [ { "end": 53, @@ -40941,7 +42697,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_patch_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py" }, { "canonical": true, @@ -40950,19 +42706,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_association", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveAssociation", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.SetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "RemoveAssociation" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -40973,9 +42729,13 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "resource", "type": "str" }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -40989,14 +42749,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_association" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for RemoveAssociation", - "file": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -41029,7 +42789,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py" }, { "canonical": true, @@ -41038,19 +42798,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_rule", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveRule", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.TestIamPermissions", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", 
"shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "RemoveRule" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -41061,9 +42821,13 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41077,14 +42841,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_rule" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for RemoveRule", - "file": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -41117,28 +42881,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.SetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, - "shortName": "SetIamPolicy" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest" }, { "name": "project", @@ -41149,13 +42913,9 @@ "type": "str" }, { - "name": "resource", + "name": "notification_endpoint", "type": "str" }, - { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41169,14 +42929,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for SetIamPolicy", - "file": 
"compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_notification_endpoints_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Delete_sync", "segments": [ { "end": 53, @@ -41209,28 +42969,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, - "shortName": "TestIamPermissions" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest" }, { "name": "project", @@ -41241,13 +43001,9 @@ "type": "str" }, { - "name": "resource", + "name": "notification_endpoint", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41261,14 +43017,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.NotificationEndpoint", + "shortName": "get" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_notification_endpoints_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Get_sync", "segments": [ { "end": 53, @@ -41301,7 +43057,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_get_sync.py" }, { "canonical": true, @@ -41310,19 +43066,19 @@ "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.delete", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.insert", "method": { - "fullName": 
"google.cloud.compute.v1.RegionNotificationEndpoints.Delete", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Insert", "service": { "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", "shortName": "RegionNotificationEndpoints" }, - "shortName": "Delete" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest" }, { "name": "project", @@ -41333,8 +43089,8 @@ "type": "str" }, { - "name": "notification_endpoint", - "type": "str" + "name": "notification_endpoint_resource", + "type": "google.cloud.compute_v1.types.NotificationEndpoint" }, { "name": "retry", @@ -41350,21 +43106,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "insert" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_notification_endpoints_delete_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_notification_endpoints_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Delete_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -41374,22 +43130,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_delete_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_insert_sync.py" }, { "canonical": true, @@ -41398,19 +43154,19 @@ "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.get", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Get", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.List", "service": { "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", "shortName": "RegionNotificationEndpoints" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest" }, { "name": "project", @@ -41420,10 +43176,6 @@ "name": "region", "type": "str" }, - { - "name": "notification_endpoint", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41437,14 +43189,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.NotificationEndpoint", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_notification_endpoints_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_notification_endpoints_list_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Get_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_List_sync", "segments": [ { "end": 53, @@ -41462,43 +43214,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_get_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.insert", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Insert", + "fullName": "google.cloud.compute.v1.RegionOperations.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "Insert" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionOperationRequest" }, { "name": "project", @@ -41509,8 +43261,8 @@ "type": "str" }, { - "name": "notification_endpoint_resource", - "type": "google.cloud.compute_v1.types.NotificationEndpoint" + "name": "operation", + "type": "str" }, { "name": "retry", @@ -41525,22 +43277,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.DeleteRegionOperationResponse", + "shortName": "delete" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_notification_endpoints_insert_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_operations_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Insert_sync", + "regionTag": "compute_v1_generated_RegionOperations_Delete_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -41550,43 +43302,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_insert_sync.py" + "title": "compute_v1_generated_region_operations_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": 
"google.cloud.compute_v1.RegionNotificationEndpointsClient.list", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.List", + "fullName": "google.cloud.compute.v1.RegionOperations.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "List" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest" + "type": "google.cloud.compute_v1.types.GetRegionOperationRequest" }, { "name": "project", @@ -41596,6 +43348,10 @@ "name": "region", "type": "str" }, + { + "name": "operation", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41609,14 +43365,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_notification_endpoints_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_operations_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_List_sync", + "regionTag": "compute_v1_generated_RegionOperations_Get_sync", "segments": [ { "end": 53, @@ -41634,22 +43390,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_list_sync.py" + "title": "compute_v1_generated_region_operations_get_sync.py" }, { "canonical": true, @@ -41658,19 +43414,19 @@ "fullName": "google.cloud.compute_v1.RegionOperationsClient", "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.delete", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Delete", + "fullName": "google.cloud.compute.v1.RegionOperations.List", "service": { "fullName": "google.cloud.compute.v1.RegionOperations", "shortName": "RegionOperations" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionOperationRequest" + "type": "google.cloud.compute_v1.types.ListRegionOperationsRequest" }, { "name": "project", @@ -41680,10 +43436,6 @@ "name": "region", "type": "str" }, - { - "name": "operation", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41697,14 +43449,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.DeleteRegionOperationResponse", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.region_operations.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_operations_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_operations_list_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Delete_sync", + "regionTag": "compute_v1_generated_RegionOperations_List_sync", "segments": [ { "end": 53, @@ -41722,22 +43474,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_delete_sync.py" + "title": "compute_v1_generated_region_operations_list_sync.py" }, { "canonical": true, @@ -41746,19 +43498,19 @@ "fullName": "google.cloud.compute_v1.RegionOperationsClient", "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.get", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.wait", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Get", + "fullName": "google.cloud.compute.v1.RegionOperations.Wait", "service": { "fullName": "google.cloud.compute.v1.RegionOperations", "shortName": "RegionOperations" }, - "shortName": "Get" + "shortName": "Wait" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionOperationRequest" + "type": "google.cloud.compute_v1.types.WaitRegionOperationRequest" }, { "name": "project", @@ -41786,13 +43538,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Operation", - "shortName": "get" + "shortName": "wait" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_operations_get_sync.py", + "description": "Sample for Wait", + "file": "compute_v1_generated_region_operations_wait_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Get_sync", + "regionTag": "compute_v1_generated_RegionOperations_Wait_sync", "segments": [ { "end": 53, @@ -41825,28 +43577,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_get_sync.py" + "title": "compute_v1_generated_region_operations_wait_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.list", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.add_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.List", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.AddRule", "service": { - "fullName": "google.cloud.compute.v1.RegionOperations", - "shortName": "RegionOperations" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "List" + "shortName": "AddRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionOperationsRequest" + "type": "google.cloud.compute_v1.types.AddRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -41856,6 +43608,14 @@ "name": "region", "type": "str" }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41869,14 +43629,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.region_operations.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_operations_list_sync.py", + "description": "Sample for AddRule", + "file": "compute_v1_generated_region_security_policies_add_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_List_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_AddRule_sync", "segments": [ { "end": 53, @@ -41894,43 +43654,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_list_sync.py" + "title": "compute_v1_generated_region_security_policies_add_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.wait", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Wait", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionOperations", - "shortName": "RegionOperations" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "Wait" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.WaitRegionOperationRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest" }, { "name": "project", @@ -41941,7 +43701,7 @@ "type": "str" }, { - "name": "operation", + "name": "security_policy", "type": "str" }, { @@ -41957,14 +43717,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Operation", - "shortName": "wait" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for Wait", - "file": "compute_v1_generated_region_operations_wait_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_security_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Wait_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Delete_sync", "segments": [ { "end": 53, @@ -41997,7 +43757,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_wait_sync.py" + "title": "compute_v1_generated_region_security_policies_delete_sync.py" }, { "canonical": true, @@ -42006,19 +43766,19 @@ "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Delete", + "fullName": 
"google.cloud.compute.v1.RegionSecurityPolicies.GetRule", "service": { "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", "shortName": "RegionSecurityPolicies" }, - "shortName": "Delete" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -42045,14 +43805,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", + "shortName": "get_rule" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_security_policies_delete_sync.py", + "description": "Sample for GetRule", + "file": "compute_v1_generated_region_security_policies_get_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_Delete_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_GetRule_sync", "segments": [ { "end": 53, @@ -42085,7 +43845,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_delete_sync.py" + "title": "compute_v1_generated_region_security_policies_get_rule_sync.py" }, { "canonical": true, @@ -42347,6 +44107,98 @@ ], "title": "compute_v1_generated_region_security_policies_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.patch_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.PatchRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "PatchRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRuleRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" + }, + "description": "Sample for PatchRule", + "file": "compute_v1_generated_region_security_policies_patch_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_PatchRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_patch_rule_sync.py" + }, { "canonical": true, "clientMethod": { @@ -42439,6 +44291,94 @@ ], "title": "compute_v1_generated_region_security_policies_patch_sync.py" }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.remove_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.RemoveRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "RemoveRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveRuleRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" + }, + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_region_security_policies_remove_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_RemoveRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_remove_rule_sync.py" + }, { "canonical": true, "clientMethod": { @@ -47355,6 +49295,94 @@ ], "title": "compute_v1_generated_routers_delete_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.get_nat_ip_info", + "method": { + "fullName": "google.cloud.compute.v1.Routers.GetNatIpInfo", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "GetNatIpInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNatIpInfoRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NatIpInfoResponse", + "shortName": "get_nat_ip_info" + }, + "description": "Sample for GetNatIpInfo", + "file": "compute_v1_generated_routers_get_nat_ip_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_GetNatIpInfo_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_routers_get_nat_ip_info_sync.py" + }, { "canonical": true, "clientMethod": { @@ -50017,45 +52045,217 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_service_attachments_patch_sync.py" + "title": "compute_v1_generated_service_attachments_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_service_attachments_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", - "shortName": "ServiceAttachmentsClient" + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient", + "shortName": "SnapshotSettingsServiceClient" }, - "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient.get", "method": { - "fullName": "google.cloud.compute.v1.ServiceAttachments.SetIamPolicy", + "fullName": "google.cloud.compute.v1.SnapshotSettingsService.Get", "service": { - "fullName": "google.cloud.compute.v1.ServiceAttachments", - "shortName": "ServiceAttachments" + "fullName": "google.cloud.compute.v1.SnapshotSettingsService", + "shortName": "SnapshotSettingsService" }, - "shortName": "SetIamPolicy" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest" + "type": "google.cloud.compute_v1.types.GetSnapshotSettingRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50069,22 +52269,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.types.SnapshotSettings", + "shortName": "get" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_service_attachments_set_iam_policy_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_snapshot_settings_service_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ServiceAttachments_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_SnapshotSettingsService_Get_sync", "segments": [ { - "end": 53, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 51, "start": 27, "type": "SHORT" }, @@ -50094,59 +52294,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_service_attachments_set_iam_policy_sync.py" + "title": "compute_v1_generated_snapshot_settings_service_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", - "shortName": "ServiceAttachmentsClient" + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient", + "shortName": "SnapshotSettingsServiceClient" }, - "fullName": 
"google.cloud.compute_v1.ServiceAttachmentsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient.patch", "method": { - "fullName": "google.cloud.compute.v1.ServiceAttachments.TestIamPermissions", + "fullName": "google.cloud.compute.v1.SnapshotSettingsService.Patch", "service": { - "fullName": "google.cloud.compute.v1.ServiceAttachments", - "shortName": "ServiceAttachments" + "fullName": "google.cloud.compute.v1.SnapshotSettingsService", + "shortName": "SnapshotSettingsService" }, - "shortName": "TestIamPermissions" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest" + "type": "google.cloud.compute_v1.types.PatchSnapshotSettingRequest" }, { "name": "project", "type": "str" }, { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "snapshot_settings_resource", + "type": "google.cloud.compute_v1.types.SnapshotSettings" }, { "name": "retry", @@ -50161,22 +52353,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_snapshot_settings_service_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ServiceAttachments_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_SnapshotSettingsService_Patch_sync", "segments": [ { - "end": 53, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 51, "start": 27, "type": "SHORT" }, @@ -50186,22 +52378,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py" + "title": "compute_v1_generated_snapshot_settings_service_patch_sync.py" }, { "canonical": true, @@ -55307,6 +57499,98 @@ ], "title": "compute_v1_generated_target_instances_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": "google.cloud.compute_v1.TargetInstancesClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyTargetInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "target_instance", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + 
{ + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_target_instances_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_SetSecurityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_set_security_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -56287,6 +58571,98 @@ ], "title": "compute_v1_generated_target_pools_set_backup_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_target_pools_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_SetSecurityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_set_security_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py b/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py index 318c54727166..84eff6387156 100644 --- a/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py +++ b/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py @@ -50,7 +50,8 @@ class computeCallTransformer(cst.CSTTransformer): 'add_resource_policies': ('disk', 'disks_add_resource_policies_request_resource', 'project', 'zone', 
'request_id', ), 'add_rule': ('firewall_policy', 'firewall_policy_rule_resource', 'request_id', ), 'add_signed_url_key': ('backend_bucket', 'project', 'signed_url_key_resource', 'request_id', ), - 'aggregated_list': ('project', 'filter', 'include_all_scopes', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'aggregated_list': ('project', 'filter', 'include_all_scopes', 'max_results', 'order_by', 'page_token', 'return_partial_success', 'service_project_number', ), + 'announce': ('project', 'public_advertised_prefix', 'request_id', ), 'apply_updates_to_instances': ('instance_group_manager', 'instance_group_managers_apply_updates_request_resource', 'project', 'zone', ), 'attach_disk': ('attached_disk_resource', 'instance', 'project', 'zone', 'force_attach', 'request_id', ), 'attach_network_endpoints': ('global_network_endpoint_groups_attach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), @@ -80,6 +81,8 @@ class computeCallTransformer(cst.CSTTransformer): 'get_guest_attributes': ('instance', 'project', 'zone', 'query_path', 'variable_key', ), 'get_health': ('backend_service', 'project', 'resource_group_reference_resource', ), 'get_iam_policy': ('project', 'resource', 'options_requested_policy_version', ), + 'get_macsec_config': ('interconnect', 'project', ), + 'get_nat_ip_info': ('project', 'region', 'router', 'nat_name', ), 'get_nat_mapping_info': ('project', 'region', 'router', 'filter', 'max_results', 'nat_name', 'order_by', 'page_token', 'return_partial_success', ), 'get_router_status': ('project', 'region', 'router', ), 'get_rule': ('firewall_policy', 'priority', ), @@ -165,7 +168,7 @@ class computeCallTransformer(cst.CSTTransformer): 'stop_group_async_replication': ('disks_stop_group_async_replication_resource_resource', 'project', 'zone', 'request_id', ), 'suspend': ('instance', 'project', 'zone', 'discard_local_ssd', 'request_id', ), 'switch_to_custom_mode': ('network', 'project', 'request_id', ), - 'test_iam_permissions': ('project', 'resource', 'test_permissions_request_resource', 'zone', ), + 'test_iam_permissions': ('project', 'resource', 'test_permissions_request_resource', ), 'update': ('autoscaler_resource', 'project', 'zone', 'autoscaler', 'request_id', ), 'update_access_config': ('access_config_resource', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), 'update_display_device': ('display_device_resource', 'instance', 'project', 'zone', 'request_id', ), @@ -175,6 +178,7 @@ class computeCallTransformer(cst.CSTTransformer): 'update_shielded_instance_config': ('instance', 'project', 'shielded_instance_config_resource', 'zone', 'request_id', ), 'validate': ('project', 'region', 'region_url_maps_validate_request_resource', 'url_map', ), 'wait': ('operation', 'project', ), + 'withdraw': ('project', 'public_advertised_prefix', 'request_id', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-compute/tests/system/__init__.py b/packages/google-cloud-compute/tests/system/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-compute/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py index fc06105fde63..080043bc07f0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -659,6 +659,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -723,6 +724,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py index 4389a680dcf4..99aa645364b0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py @@ -633,6 +633,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -697,6 +698,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py index 3506fec8f56e..0b89ac549a8c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py index 929d4e510683..72cff362d9f3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -2963,6 +2963,295 @@ def test_get_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyBackendBucketRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyBackendBucketRequest, +): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "resource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyBackendBucketRequest.pb( + compute.GetIamPolicyBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendBuckets/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyBackendBucketRequest(), + project="project_value", + resource="resource_value", + ) + + +def test_get_iam_policy_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5853,36 +6142,854 @@ def test_set_edge_security_policy_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.UpdateBackendBucketRequest, + compute.SetIamPolicyBackendBucketRequest, dict, ], ) -def test_update_rest(request_type): +def test_set_iam_policy_rest(request_type): client = BackendBucketsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_bucket": "sample2"} - request_init["backend_bucket_resource"] = { - "bucket_name": "bucket_name_value", - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "rules": [ + { + "action": "action_value", + "conditions": [ + { + "iam": "iam_value", + "op": "op_value", + "svc": "svc_value", + "sys": "sys_value", + "values": ["values_value1", "values_value2"], + } + ], + "description": "description_value", + "ins": ["ins_value1", "ins_value2"], + "log_configs": [ + { + "cloud_audit": { + "authorization_logging_options": { + "permission_type": "permission_type_value" + }, + "log_name": "log_name_value", + }, + "counter": { + "custom_fields": [ + {"name": "name_value", "value": "value_value"} + ], + "field": "field_value", + "metric": "metric_value", + }, + "data_access": {"log_mode": "log_mode_value"}, + } + ], + "not_ins": ["not_ins_value1", "not_ins_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + } + ], + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyBackendBucketRequest.meta.fields[ + "global_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "global_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["global_set_policy_request_resource"][field]) + ): + del request_init["global_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["global_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyBackendBucketRequest, +): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "globalSetPolicyRequestResource", + "project", + "resource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyBackendBucketRequest.pb( + compute.SetIamPolicyBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + resource="resource_value", + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendBuckets/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyBackendBucketRequest(), + project="project_value", + resource="resource_value", + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_set_iam_policy_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsBackendBucketRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsBackendBucketRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsBackendBucketRequest, +): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "resource", + "testPermissionsRequestResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsBackendBucketRequest.pb( + compute.TestIamPermissionsBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + + request = compute.TestIamPermissionsBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendBuckets/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
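For context on the new backend-bucket IAM surface that the surrounding tests exercise, the flattened call shapes map onto client usage roughly like the sketch below. This is an illustrative sketch only, not part of the generated diff: the project and bucket names are placeholders, and it assumes the flattened keyword arguments shown in these tests (project, resource, and the *_request_resource message).

    # Illustrative sketch only (placeholder project and bucket names); mirrors the
    # flattened arguments used by the generated tests for the new IAM methods.
    from google.cloud import compute_v1

    client = compute_v1.BackendBucketsClient()

    # Read the current IAM policy on a backend bucket.
    policy = client.get_iam_policy(project="my-project", resource="my-backend-bucket")

    # Replace the policy (flattened form, as in test_set_iam_policy_rest_flattened).
    client.set_iam_policy(
        project="my-project",
        resource="my-backend-bucket",
        global_set_policy_request_resource=compute_v1.GlobalSetPolicyRequest(
            policy=policy,
        ),
    )

    # Ask which of the listed permissions the caller holds on the bucket.
    response = client.test_iam_permissions(
        project="my-project",
        resource="my-backend-bucket",
        test_permissions_request_resource=compute_v1.TestPermissionsRequest(
            permissions=["compute.backendBuckets.get"],
        ),
    )
    print(list(response.permissions))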
+ with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsBackendBucketRequest(), + project="project_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_test_iam_permissions_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateBackendBucketRequest, + dict, + ], +) +def test_update_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["backend_bucket_resource"] = { + "bucket_name": "bucket_name_value", + "cdn_policy": { + "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], + "cache_key_policy": { + "include_http_headers": [ + "include_http_headers_value1", + "include_http_headers_value2", + ], + "query_string_whitelist": [ + "query_string_whitelist_value1", + "query_string_whitelist_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": 1074, + "default_ttl": 1176, + "max_ttl": 761, "negative_caching": True, "negative_caching_policy": [{"code": 411, "ttl": 340}], "request_coalescing": True, @@ -6836,10 +7943,13 @@ def test_backend_buckets_base_transport(): "delete", "delete_signed_url_key", "get", + "get_iam_policy", "insert", "list", "patch", "set_edge_security_policy", + "set_iam_policy", + "test_iam_permissions", "update", ) for method in methods: @@ -6990,6 +8100,9 @@ def test_backend_buckets_client_transport_session_collision(transport_name): session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 @@ -7002,6 +8115,12 @@ def test_backend_buckets_client_transport_session_collision(transport_name): session1 = client1.transport.set_edge_security_policy._session session2 = client2.transport.set_edge_security_policy._session assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py index 6757eca001d4..d9c52249c3de 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py @@ -1450,6 +1450,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1514,6 +1515,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + 
"serviceProjectNumber", ) ) & set(("project",)) @@ -4194,6 +4196,12 @@ def test_insert_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -4205,6 +4213,7 @@ def test_insert_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -4746,6 +4755,12 @@ def test_insert_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -4757,6 +4772,7 @@ def test_insert_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5474,316 +5490,55 @@ def test_list_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - compute.PatchBackendServiceRequest, + compute.ListUsableBackendServicesRequest, dict, ], ) -def test_patch_rest(request_type): +def test_list_usable_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, 
- "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - "enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchBackendServiceRequest.meta.fields[ - "backend_service_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "backend_service_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backend_service_resource"][field])): - del request_init["backend_service_resource"][field][i][subfield] - else: - del request_init["backend_service_resource"][field][subfield] + request_init = {"project": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", + return_value = compute.BackendServiceListUsable( + id="id_value", kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", + next_page_token="next_page_token_value", self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendServiceListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.list_usable(request) # Establish that the response is the type that we expect. - assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" + assert isinstance(response, pagers.ListUsablePager) + assert response.id == "id_value" assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" + assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" -def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequest): +def test_list_usable_rest_required_fields( + request_type=compute.ListUsableBackendServicesRequest, +): transport_class = transports.BackendServicesRestTransport request_init = {} - request_init["backend_service"] = "" request_init["project"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -5799,24 +5554,29 @@ def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - 
jsonified_request["backendService"] = "backend_service_value" jsonified_request["project"] = "project_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "backendService" in jsonified_request - assert jsonified_request["backendService"] == "backend_service_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" @@ -5827,7 +5587,7 @@ def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.BackendServiceListUsable() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5839,49 +5599,50 @@ def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendServiceListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.list_usable(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_rest_unset_required_fields(): +def test_list_usable_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.list_usable._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) - & set( + set( ( - "backendService", - "backendServiceResource", - "project", + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", ) ) + & set(("project",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_rest_interceptors(null_interceptor): +def test_list_usable_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5894,14 +5655,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackendServicesRestInterceptor, "post_patch" + transports.BackendServicesRestInterceptor, "post_list_usable" ) as post, mock.patch.object( - transports.BackendServicesRestInterceptor, "pre_patch" + transports.BackendServicesRestInterceptor, 
"pre_list_usable" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchBackendServiceRequest.pb( - compute.PatchBackendServiceRequest() + pb_message = compute.ListUsableBackendServicesRequest.pb( + compute.ListUsableBackendServicesRequest() ) transcode.return_value = { "method": "post", @@ -5913,17 +5674,19 @@ def test_patch_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.BackendServiceListUsable.to_json( + compute.BackendServiceListUsable() + ) - request = compute.PatchBackendServiceRequest() + request = compute.ListUsableBackendServicesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.BackendServiceListUsable() - client.patch( + client.list_usable( request, metadata=[ ("key", "val"), @@ -5935,8 +5698,8 @@ def test_patch_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_rest_bad_request( - transport: str = "rest", request_type=compute.PatchBackendServiceRequest +def test_list_usable_rest_bad_request( + transport: str = "rest", request_type=compute.ListUsableBackendServicesRequest ): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5944,7 +5707,7 @@ def test_patch_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} + request_init = {"project": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5956,10 +5719,10 @@ def test_patch_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch(request) + client.list_usable(request) -def test_patch_rest_flattened(): +def test_list_usable_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5968,18 +5731,14 @@ def test_patch_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation() + return_value = compute.BackendServiceListUsable() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "backend_service": "sample2"} + sample_request = {"project": "sample1"} # get truthy value for each flattened field mock_args = dict( project="project_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), ) mock_args.update(sample_request) @@ -5987,25 +5746,25 @@ def test_patch_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendServiceListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch(**mock_args) + client.list_usable(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + "%s/compute/v1/projects/{project}/global/backendServices/listUsable" % client.transport._host, args[1], ) -def test_patch_rest_flattened_error(transport: str = "rest"): +def test_list_usable_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6014,21 +5773,72 @@ def test_patch_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.patch( - compute.PatchBackendServiceRequest(), + client.list_usable( + compute.ListUsableBackendServicesRequest(), project="project_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), ) -def test_patch_rest_error(): +def test_list_usable_rest_pager(transport: str = "rest"): client = BackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + compute.BackendService(), + compute.BackendService(), + ], + next_page_token="abc", + ), + compute.BackendServiceListUsable( + items=[], + next_page_token="def", + ), + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + ], + next_page_token="ghi", + ), + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + compute.BackendService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendServiceListUsable.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.list_usable(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendService) for i in results) + + pages = list(client.list_usable(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", @@ -6037,7 +5847,7 @@ def test_patch_rest_error(): dict, ], ) -def test_patch_unary_rest(request_type): +def test_patch_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6188,6 +5998,12 @@ def test_patch_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -6199,6 +6015,7 @@ def test_patch_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6310,15 +6127,35 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.patch(request) # Establish that the response is the type that we expect. 
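As the pager test above illustrates, the new `ListUsable` RPC surfaces as `BackendServicesClient.list_usable`, which returns a pager that follows `next_page_token` transparently. A minimal usage sketch, with a placeholder project ID:

    # Illustrative sketch only (placeholder project); iterates the new list_usable
    # pager, which fetches additional pages on demand.
    from google.cloud import compute_v1

    client = compute_v1.BackendServicesClient()
    for backend_service in client.list_usable(project="my-project"):
        print(backend_service.name)

    # Pages can also be walked explicitly, as the pager test does:
    for page in client.list_usable(project="my-project").pages:
        print(page.next_page_token)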
- assert isinstance(response, compute.Operation) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_patch_unary_rest_required_fields( - request_type=compute.PatchBackendServiceRequest, -): +def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequest): transport_class = transports.BackendServicesRestTransport request_init = {} @@ -6394,14 +6231,14 @@ def test_patch_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.patch(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_unary_rest_unset_required_fields(): +def test_patch_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) @@ -6420,7 +6257,7 @@ def test_patch_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): +def test_patch_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6462,7 +6299,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch_unary( + client.patch( request, metadata=[ ("key", "val"), @@ -6474,7 +6311,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_unary_rest_bad_request( +def test_patch_rest_bad_request( transport: str = "rest", request_type=compute.PatchBackendServiceRequest ): client = BackendServicesClient( @@ -6495,10 +6332,10 @@ def test_patch_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch_unary(request) + client.patch(request) -def test_patch_unary_rest_flattened(): +def test_patch_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6531,7 +6368,7 @@ def test_patch_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch_unary(**mock_args) + client.patch(**mock_args) # Establish that the underlying call 
was made with the expected # request object values. @@ -6544,7 +6381,7 @@ def test_patch_unary_rest_flattened(): ) -def test_patch_unary_rest_flattened_error(transport: str = "rest"): +def test_patch_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6553,7 +6390,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.patch_unary( + client.patch( compute.PatchBackendServiceRequest(), project="project_value", backend_service="backend_service_value", @@ -6563,7 +6400,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): ) -def test_patch_unary_rest_error(): +def test_patch_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6572,11 +6409,11 @@ def test_patch_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetEdgeSecurityPolicyBackendServiceRequest, + compute.PatchBackendServiceRequest, dict, ], ) -def test_set_edge_security_policy_rest(request_type): +def test_patch_unary_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6584,57 +6421,1017 @@ def test_set_edge_security_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ - "security_policy_reference_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_policy_reference_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - + request_init["backend_service_resource"] = { + "affinity_cookie_ttl_sec": 2432, + "backends": [ + { + "balancing_mode": "balancing_mode_value", + "capacity_scaler": 0.1575, + "description": "description_value", + "failover": True, + "group": "group_value", + "max_connections": 1608, + "max_connections_per_endpoint": 2990, + "max_connections_per_instance": 2978, + "max_rate": 849, + "max_rate_per_endpoint": 0.22310000000000002, + "max_rate_per_instance": 0.22190000000000001, + "max_utilization": 0.1633, + } + ], + "cdn_policy": { + "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], + "cache_key_policy": { + "include_host": True, + "include_http_headers": [ + "include_http_headers_value1", + "include_http_headers_value2", + ], + "include_named_cookies": [ + "include_named_cookies_value1", + "include_named_cookies_value2", + ], + "include_protocol": True, + "include_query_string": True, + "query_string_blacklist": [ + "query_string_blacklist_value1", + "query_string_blacklist_value2", + ], + "query_string_whitelist": [ + "query_string_whitelist_value1", + "query_string_whitelist_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": 1074, + "default_ttl": 1176, + "max_ttl": 761, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": 340}], + "request_coalescing": True, + "serve_while_stale": 1813, + "signed_url_cache_max_age_sec": 2890, + "signed_url_key_names": [ + "signed_url_key_names_value1", + "signed_url_key_names_value2", + ], + }, + "circuit_breakers": { + "max_connections": 1608, + "max_pending_requests": 2149, + "max_requests": 1313, + "max_requests_per_connection": 2902, + "max_retries": 1187, + }, + "compression_mode": "compression_mode_value", + "connection_draining": {"draining_timeout_sec": 2124}, + "connection_tracking_policy": { + "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", + "enable_strong_affinity": True, + "idle_timeout_sec": 1694, + "tracking_mode": "tracking_mode_value", + }, + "consistent_hash": { + "http_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {"nanos": 543, "seconds": 751}, + }, + "http_header_name": "http_header_name_value", + "minimum_ring_size": 1829, + }, + "creation_timestamp": "creation_timestamp_value", + "custom_request_headers": [ + 
"custom_request_headers_value1", + "custom_request_headers_value2", + ], + "custom_response_headers": [ + "custom_response_headers_value1", + "custom_response_headers_value2", + ], + "description": "description_value", + "edge_security_policy": "edge_security_policy_value", + "enable_c_d_n": True, + "failover_policy": { + "disable_connection_drain_on_failover": True, + "drop_traffic_if_unhealthy": True, + "failover_ratio": 0.1494, + }, + "fingerprint": "fingerprint_value", + "health_checks": ["health_checks_value1", "health_checks_value2"], + "iap": { + "enabled": True, + "oauth2_client_id": "oauth2_client_id_value", + "oauth2_client_secret": "oauth2_client_secret_value", + "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", + }, + "id": 205, + "kind": "kind_value", + "load_balancing_scheme": "load_balancing_scheme_value", + "locality_lb_policies": [ + { + "custom_policy": {"data": "data_value", "name": "name_value"}, + "policy": {"name": "name_value"}, + } + ], + "locality_lb_policy": "locality_lb_policy_value", + "log_config": { + "enable": True, + "optional_fields": ["optional_fields_value1", "optional_fields_value2"], + "optional_mode": "optional_mode_value", + "sample_rate": 0.1165, + }, + "max_stream_duration": {}, + "metadatas": {}, + "name": "name_value", + "network": "network_value", + "outlier_detection": { + "base_ejection_time": {}, + "consecutive_errors": 1956, + "consecutive_gateway_failure": 2880, + "enforcing_consecutive_errors": 3006, + "enforcing_consecutive_gateway_failure": 3930, + "enforcing_success_rate": 2334, + "interval": {}, + "max_ejection_percent": 2118, + "success_rate_minimum_hosts": 2799, + "success_rate_request_volume": 2915, + "success_rate_stdev_factor": 2663, + }, + "port": 453, + "port_name": "port_name_value", + "protocol": "protocol_value", + "region": "region_value", + "security_policy": "security_policy_value", + "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, + "client_tls_policy": "client_tls_policy_value", + "subject_alt_names": [ + "subject_alt_names_value1", + "subject_alt_names_value2", + ], + }, + "self_link": "self_link_value", + "service_bindings": ["service_bindings_value1", "service_bindings_value2"], + "session_affinity": "session_affinity_value", + "subsetting": {"policy": "policy_value"}, + "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchBackendServiceRequest, +): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = "backend_service_value" + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "backendServiceResource", + "project", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchBackendServiceRequest.pb( + compute.PatchBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
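The interceptor test above drives the same pre_/post_ hooks an application would override in its own code. A minimal sketch of that wiring follows; the LoggingInterceptor name and the print statements are illustrative only, and anonymous credentials are used just to keep the example self-contained.

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.backend_services import (
    BackendServicesClient,
    transports,
)


class LoggingInterceptor(transports.BackendServicesRestInterceptor):
    def pre_patch(self, request, metadata):
        # Runs before the HTTP request; may amend the request or metadata.
        print("patching backend service:", request.backend_service)
        return request, metadata

    def post_patch(self, response):
        # Runs after the HTTP response is deserialized into compute.Operation.
        print("operation status:", response.status)
        return response


transport = transports.BackendServicesRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = BackendServicesClient(transport=transport)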
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchBackendServiceRequest(), + project="project_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + + +def test_patch_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetEdgeSecurityPolicyBackendServiceRequest, + dict, + ], +) +def test_set_edge_security_policy_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
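The flattened tests above pin down the two equivalent ways of invoking patch_unary and the ValueError raised when they are mixed. Roughly, with placeholder project and backend service names and assuming Application Default Credentials are available:

from google.cloud import compute_v1 as compute

client = compute.BackendServicesClient()
resource = compute.BackendService(affinity_cookie_ttl_sec=2432)

# Flattened arguments: the client builds PatchBackendServiceRequest internally.
client.patch_unary(
    project="my-project",                  # placeholder
    backend_service="my-backend-service",  # placeholder
    backend_service_resource=resource,
)

# Equivalent explicit request object.
request = compute.PatchBackendServiceRequest(
    project="my-project",
    backend_service="my-backend-service",
    backend_service_resource=resource,
)
client.patch_unary(request=request)

# Supplying both the request object and flattened fields raises ValueError.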
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_edge_security_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_set_edge_security_policy_rest_required_fields( + request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, +): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_edge_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = "backend_service_value" + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_edge_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_edge_security_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_edge_security_policy_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "project", + "securityPolicyReferenceResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_edge_security_policy_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_set_edge_security_policy" + ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "pre_set_edge_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb( + compute.SetEdgeSecurityPolicyBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + 
request = compute.SetEdgeSecurityPolicyBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_edge_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_edge_security_policy_rest_bad_request( + transport: str = "rest", + request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_edge_security_policy(request) + + +def test_set_edge_security_policy_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_edge_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setEdgeSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_edge_security_policy_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_edge_security_policy( + compute.SetEdgeSecurityPolicyBackendServiceRequest(), + project="project_value", + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_edge_security_policy_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetEdgeSecurityPolicyBackendServiceRequest, + dict, + ], +) +def test_set_edge_security_policy_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + if result and hasattr(result, "keys"): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: @@ -6701,35 +7498,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_edge_security_policy(request) + response = client.set_edge_security_policy_unary(request) # Establish that the response is the type that we expect. 
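The _unary renames in the hunks above mirror the paired variants the compute clients generate for mutating RPCs: the plain method wraps the result in an ExtendedOperation with polling helpers, while the *_unary variant returns the raw compute.Operation. A rough usage sketch, with placeholder names and assuming default credentials:

from google.cloud import compute_v1 as compute

client = compute.BackendServicesClient()
ref = compute.SecurityPolicyReference(security_policy="my-edge-policy")  # placeholder

# ExtendedOperation: can be blocked on until the underlying operation is DONE.
op = client.set_edge_security_policy(
    project="my-project",
    backend_service="my-backend-service",
    security_policy_reference_resource=ref,
)
op.result()

# Raw compute.Operation: no polling helpers, the caller inspects status directly.
raw = client.set_edge_security_policy_unary(
    project="my-project",
    backend_service="my-backend-service",
    security_policy_reference_resource=ref,
)
print(raw.status)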
- assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_set_edge_security_policy_rest_required_fields( +def test_set_edge_security_policy_unary_rest_required_fields( request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, ): transport_class = transports.BackendServicesRestTransport @@ -6807,14 +7582,14 @@ def test_set_edge_security_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_edge_security_policy(request) + response = client.set_edge_security_policy_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_edge_security_policy_rest_unset_required_fields(): +def test_set_edge_security_policy_unary_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) @@ -6833,7 +7608,7 @@ def test_set_edge_security_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_edge_security_policy_rest_interceptors(null_interceptor): +def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6875,7 +7650,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.set_edge_security_policy( + client.set_edge_security_policy_unary( request, metadata=[ ("key", "val"), @@ -6887,7 +7662,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_edge_security_policy_rest_bad_request( +def test_set_edge_security_policy_unary_rest_bad_request( transport: str = "rest", request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, ): @@ -6909,10 +7684,10 @@ def test_set_edge_security_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_edge_security_policy(request) + client.set_edge_security_policy_unary(request) -def test_set_edge_security_policy_rest_flattened(): +def test_set_edge_security_policy_unary_rest_flattened(): client = BackendServicesClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6945,7 +7720,7 @@ def test_set_edge_security_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_edge_security_policy(**mock_args) + client.set_edge_security_policy_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -6958,7 +7733,7 @@ def test_set_edge_security_policy_rest_flattened(): ) -def test_set_edge_security_policy_rest_flattened_error(transport: str = "rest"): +def test_set_edge_security_policy_unary_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6967,7 +7742,7 @@ def test_set_edge_security_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_edge_security_policy( + client.set_edge_security_policy_unary( compute.SetEdgeSecurityPolicyBackendServiceRequest(), project="project_value", backend_service="backend_service_value", @@ -6977,7 +7752,7 @@ def test_set_edge_security_policy_rest_flattened_error(transport: str = "rest"): ) -def test_set_edge_security_policy_rest_error(): +def test_set_edge_security_policy_unary_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6986,28 +7761,102 @@ def test_set_edge_security_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetEdgeSecurityPolicyBackendServiceRequest, + compute.SetIamPolicyBackendServiceRequest, dict, ], ) -def test_set_edge_security_policy_unary_rest(request_type): +def test_set_iam_policy_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "rules": [ + { + "action": "action_value", + "conditions": [ + { + "iam": "iam_value", + "op": "op_value", + "svc": "svc_value", + "sys": "sys_value", + "values": ["values_value1", "values_value2"], + } + ], + "description": "description_value", + "ins": ["ins_value1", "ins_value2"], + "log_configs": [ + { + "cloud_audit": { + "authorization_logging_options": { + "permission_type": "permission_type_value" + }, + "log_name": "log_name_value", + }, + "counter": { + "custom_fields": [ + {"name": "name_value", "value": 
"value_value"} + ], + "field": "field_value", + "metric": "metric_value", + }, + "data_access": {"log_mode": "log_mode_value"}, + } + ], + "not_ins": ["not_ins_value1", "not_ins_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + } + ], + "version": 774, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetEdgeSecurityPolicyBackendServiceRequest.meta.fields[ - "security_policy_reference_resource" + test_field = compute.SetIamPolicyBackendServiceRequest.meta.fields[ + "global_set_policy_request_resource" ] def get_message_fields(field): @@ -7037,7 +7886,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "security_policy_reference_resource" + "global_set_policy_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -7069,66 +7918,50 @@ def get_message_fields(field): if subfield: if field_repeated: for i in range( - 0, len(request_init["security_policy_reference_resource"][field]) + 0, len(request_init["global_set_policy_request_resource"][field]) ): - del request_init["security_policy_reference_resource"][field][i][ + del request_init["global_set_policy_request_resource"][field][i][ subfield ] - else: - del request_init["security_policy_reference_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", + else: + del request_init["global_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.Policy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_edge_security_policy_unary(request) + response = client.set_iam_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.Operation) + assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 -def test_set_edge_security_policy_unary_rest_required_fields( - request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyBackendServiceRequest, ): transport_class = transports.BackendServicesRestTransport request_init = {} - request_init["backend_service"] = "" request_init["project"] = "" + request_init["resource"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7143,26 +7976,24 @@ def test_set_edge_security_policy_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_edge_security_policy._get_unset_required_fields(jsonified_request) + ).set_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["backendService"] = "backend_service_value" jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_edge_security_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).set_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "backendService" in jsonified_request - assert jsonified_request["backendService"] == "backend_service_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7171,7 +8002,7 @@ def test_set_edge_security_policy_unary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.Policy() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7193,39 +8024,39 @@ def test_set_edge_security_policy_unary_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.Policy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_edge_security_policy_unary(request) + response = client.set_iam_policy(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_edge_security_policy_unary_rest_unset_required_fields(): +def test_set_iam_policy_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(()) & set( ( - "backendService", + "globalSetPolicyRequestResource", "project", - "securityPolicyReferenceResource", + "resource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): +def test_set_iam_policy_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7238,14 +8069,14 @@ def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackendServicesRestInterceptor, "post_set_edge_security_policy" + transports.BackendServicesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( - transports.BackendServicesRestInterceptor, "pre_set_edge_security_policy" + transports.BackendServicesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb( - compute.SetEdgeSecurityPolicyBackendServiceRequest() + pb_message = compute.SetIamPolicyBackendServiceRequest.pb( + compute.SetIamPolicyBackendServiceRequest() ) transcode.return_value = { "method": "post", @@ -7257,17 +8088,17 @@ def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.Policy.to_json(compute.Policy()) - request = compute.SetEdgeSecurityPolicyBackendServiceRequest() + request = compute.SetIamPolicyBackendServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.Policy() - client.set_edge_security_policy_unary( + client.set_iam_policy( request, metadata=[ ("key", "val"), @@ -7279,9 +8110,8 @@ def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_edge_security_policy_unary_rest_bad_request( - transport: str = "rest", - request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, +def 
test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyBackendServiceRequest ): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7289,7 +8119,7 @@ def test_set_edge_security_policy_unary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} + request_init = {"project": "sample1", "resource": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7301,10 +8131,10 @@ def test_set_edge_security_policy_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_edge_security_policy_unary(request) + client.set_iam_policy(request) -def test_set_edge_security_policy_unary_rest_flattened(): +def test_set_iam_policy_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7313,17 +8143,17 @@ def test_set_edge_security_policy_unary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.Policy() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "backend_service": "sample2"} + sample_request = {"project": "sample1", "resource": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", - backend_service="backend_service_value", - security_policy_reference_resource=compute.SecurityPolicyReference( - security_policy="security_policy_value" + resource="resource_value", + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] ), ) mock_args.update(sample_request) @@ -7332,25 +8162,25 @@ def test_set_edge_security_policy_unary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.Policy.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_edge_security_policy_unary(**mock_args) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setEdgeSecurityPolicy" + "%s/compute/v1/projects/{project}/global/backendServices/{resource}/setIamPolicy" % client.transport._host, args[1], ) -def test_set_edge_security_policy_unary_rest_flattened_error(transport: str = "rest"): +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7359,17 +8189,17 @@ def test_set_edge_security_policy_unary_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. 
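For reference, the flattened set_iam_policy call exercised above looks roughly like the sketch below outside the test harness; the role and member strings are placeholders, and default credentials are assumed.

from google.cloud import compute_v1 as compute

client = compute.BackendServicesClient()

policy_request = compute.GlobalSetPolicyRequest(
    bindings=[
        compute.Binding(
            role="roles/compute.viewer",            # placeholder role
            members=["user:someone@example.com"],   # placeholder member
        )
    ]
)

policy = client.set_iam_policy(
    project="my-project",           # placeholder
    resource="my-backend-service",  # placeholder
    global_set_policy_request_resource=policy_request,
)
print(policy.etag, policy.version)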
with pytest.raises(ValueError): - client.set_edge_security_policy_unary( - compute.SetEdgeSecurityPolicyBackendServiceRequest(), + client.set_iam_policy( + compute.SetIamPolicyBackendServiceRequest(), project="project_value", - backend_service="backend_service_value", - security_policy_reference_resource=compute.SecurityPolicyReference( - security_policy="security_policy_value" + resource="resource_value", + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] ), ) -def test_set_edge_security_policy_unary_rest_error(): +def test_set_iam_policy_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7378,102 +8208,28 @@ def test_set_edge_security_policy_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetIamPolicyBackendServiceRequest, + compute.SetSecurityPolicyBackendServiceRequest, dict, ], ) -def test_set_iam_policy_rest(request_type): +def test_set_security_policy_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "resource": "sample2"} - request_init["global_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetIamPolicyBackendServiceRequest.meta.fields[ - "global_set_policy_request_resource" + test_field = compute.SetSecurityPolicyBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" ] def get_message_fields(field): @@ -7503,7 +8259,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "global_set_policy_request_resource" + "security_policy_reference_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -7535,50 +8291,88 @@ def get_message_fields(field): if subfield: if field_repeated: for i in range( - 0, len(request_init["global_set_policy_request_resource"][field]) + 0, len(request_init["security_policy_reference_resource"][field]) ): - del request_init["global_set_policy_request_resource"][field][i][ + del request_init["security_policy_reference_resource"][field][i][ subfield ] else: - del request_init["global_set_policy_request_resource"][field][subfield] + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - etag="etag_value", - iam_owned=True, - version=774, + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.set_security_policy(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, compute.Policy) - assert response.etag == "etag_value" - assert response.iam_owned is True - assert response.version == 774 + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_set_iam_policy_rest_required_fields( - request_type=compute.SetIamPolicyBackendServiceRequest, +def test_set_security_policy_rest_required_fields( + request_type=compute.SetSecurityPolicyBackendServiceRequest, ): transport_class = transports.BackendServicesRestTransport request_init = {} + request_init["backend_service"] = "" request_init["project"] = "" - request_init["resource"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7593,24 +8387,26 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["backendService"] = "backend_service_value" jsonified_request["project"] = "project_value" - jsonified_request["resource"] = "resource_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7619,7 +8415,7 @@ def test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Policy() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7641,39 +8437,39 @@ def test_set_iam_policy_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.set_security_policy(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_set_security_policy_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + unset_fields = transport.set_security_policy._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("requestId",)) & set( ( - "globalSetPolicyRequestResource", + "backendService", "project", - "resource", + "securityPolicyReferenceResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): +def test_set_security_policy_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7686,14 +8482,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackendServicesRestInterceptor, "post_set_iam_policy" + transports.BackendServicesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( - transports.BackendServicesRestInterceptor, "pre_set_iam_policy" + transports.BackendServicesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetIamPolicyBackendServiceRequest.pb( - compute.SetIamPolicyBackendServiceRequest() + pb_message = compute.SetSecurityPolicyBackendServiceRequest.pb( + compute.SetSecurityPolicyBackendServiceRequest() ) transcode.return_value = { "method": "post", @@ -7705,17 +8501,17 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Policy.to_json(compute.Policy()) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.SetIamPolicyBackendServiceRequest() + request = compute.SetSecurityPolicyBackendServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Policy() + post.return_value = compute.Operation() - client.set_iam_policy( + client.set_security_policy( request, metadata=[ ("key", "val"), @@ -7727,8 +8523,8 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=compute.SetIamPolicyBackendServiceRequest +def test_set_security_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyBackendServiceRequest ): client = BackendServicesClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7736,7 +8532,7 @@ def test_set_iam_policy_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "resource": "sample2"} + request_init = {"project": "sample1", "backend_service": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7748,10 +8544,10 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_iam_policy(request) + client.set_security_policy(request) -def test_set_iam_policy_rest_flattened(): +def test_set_security_policy_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7760,17 +8556,17 @@ def test_set_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "resource": "sample2"} + sample_request = {"project": "sample1", "backend_service": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", - resource="resource_value", - global_set_policy_request_resource=compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" ), ) mock_args.update(sample_request) @@ -7779,25 +8575,25 @@ def test_set_iam_policy_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_iam_policy(**mock_args) + client.set_security_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendServices/{resource}/setIamPolicy" + "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy" % client.transport._host, args[1], ) -def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7806,17 +8602,17 @@ def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
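For reference, a minimal usage sketch of BackendServicesClient.set_security_policy as exercised by the flattened test above; the project, backend service, and security policy names are placeholders and Application Default Credentials are assumed:

    from google.cloud import compute_v1

    client = compute_v1.BackendServicesClient()
    # Attach a Cloud Armor security policy to a global backend service.
    operation = client.set_security_policy(
        project="my-project",
        backend_service="my-backend-service",
        security_policy_reference_resource=compute_v1.SecurityPolicyReference(
            security_policy="my-security-policy"
        ),
    )
    operation.result()  # block until the extended operation finishes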
with pytest.raises(ValueError): - client.set_iam_policy( - compute.SetIamPolicyBackendServiceRequest(), + client.set_security_policy( + compute.SetSecurityPolicyBackendServiceRequest(), project="project_value", - resource="resource_value", - global_set_policy_request_resource=compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" ), ) -def test_set_iam_policy_rest_error(): +def test_set_security_policy_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7829,7 +8625,7 @@ def test_set_iam_policy_rest_error(): dict, ], ) -def test_set_security_policy_rest(request_type): +def test_set_security_policy_unary_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7954,35 +8750,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_security_policy(request) + response = client.set_security_policy_unary(request) # Establish that the response is the type that we expect. - assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_set_security_policy_rest_required_fields( +def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyBackendServiceRequest, ): transport_class = transports.BackendServicesRestTransport @@ -8060,14 +8834,14 @@ def test_set_security_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_security_policy(request) + response = client.set_security_policy_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_security_policy_rest_unset_required_fields(): +def test_set_security_policy_unary_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) @@ -8086,7 +8860,7 @@ def test_set_security_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_set_security_policy_rest_interceptors(null_interceptor): +def test_set_security_policy_unary_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8128,7 +8902,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.set_security_policy( + client.set_security_policy_unary( request, metadata=[ ("key", "val"), @@ -8140,7 +8914,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_security_policy_rest_bad_request( +def test_set_security_policy_unary_rest_bad_request( transport: str = "rest", request_type=compute.SetSecurityPolicyBackendServiceRequest ): client = BackendServicesClient( @@ -8161,10 +8935,10 @@ def test_set_security_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_security_policy(request) + client.set_security_policy_unary(request) -def test_set_security_policy_rest_flattened(): +def test_set_security_policy_unary_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8197,7 +8971,7 @@ def test_set_security_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_security_policy(**mock_args) + client.set_security_policy_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -8210,7 +8984,7 @@ def test_set_security_policy_rest_flattened(): ) -def test_set_security_policy_rest_flattened_error(transport: str = "rest"): +def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8219,7 +8993,7 @@ def test_set_security_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.set_security_policy( + client.set_security_policy_unary( compute.SetSecurityPolicyBackendServiceRequest(), project="project_value", backend_service="backend_service_value", @@ -8229,7 +9003,7 @@ def test_set_security_policy_rest_flattened_error(transport: str = "rest"): ) -def test_set_security_policy_rest_error(): +def test_set_security_policy_unary_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8238,28 +9012,28 @@ def test_set_security_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetSecurityPolicyBackendServiceRequest, + compute.TestIamPermissionsBackendServiceRequest, dict, ], ) -def test_set_security_policy_unary_rest(request_type): +def test_test_iam_permissions_rest(request_type): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} - request_init["security_policy_reference_resource"] = { - "security_policy": "security_policy_value" + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetSecurityPolicyBackendServiceRequest.meta.fields[ - "security_policy_reference_resource" + test_field = compute.TestIamPermissionsBackendServiceRequest.meta.fields[ + "test_permissions_request_resource" ] def get_message_fields(field): @@ -8289,7 +9063,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "security_policy_reference_resource" + "test_permissions_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -8321,66 +9095,46 @@ def get_message_fields(field): if subfield: if field_repeated: for i in range( - 0, len(request_init["security_policy_reference_resource"][field]) + 0, len(request_init["test_permissions_request_resource"][field]) ): - del request_init["security_policy_reference_resource"][field][i][ + del request_init["test_permissions_request_resource"][field][i][ subfield ] else: - del request_init["security_policy_reference_resource"][field][subfield] + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_security_policy_unary(request) + response = client.test_iam_permissions(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.Operation) + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] -def test_set_security_policy_unary_rest_required_fields( - request_type=compute.SetSecurityPolicyBackendServiceRequest, +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsBackendServiceRequest, ): transport_class = transports.BackendServicesRestTransport request_init = {} - request_init["backend_service"] = "" request_init["project"] = "" + request_init["resource"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8395,26 +9149,24 @@ def test_set_security_policy_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_security_policy._get_unset_required_fields(jsonified_request) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["backendService"] = "backend_service_value" jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_security_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "backendService" in jsonified_request - assert jsonified_request["backendService"] == "backend_service_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8423,7 +9175,7 @@ def test_set_security_policy_unary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.TestPermissionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8445,39 +9197,39 @@ def test_set_security_policy_unary_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_security_policy_unary(request) + response = client.test_iam_permissions(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_security_policy_unary_rest_unset_required_fields(): +def test_test_iam_permissions_rest_unset_required_fields(): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_security_policy._get_unset_required_fields({}) + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(()) & set( ( - "backendService", "project", - "securityPolicyReferenceResource", + "resource", + "testPermissionsRequestResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_security_policy_unary_rest_interceptors(null_interceptor): +def test_test_iam_permissions_rest_interceptors(null_interceptor): transport = transports.BackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8490,14 +9242,14 @@ def test_set_security_policy_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackendServicesRestInterceptor, "post_set_security_policy" + transports.BackendServicesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( - transports.BackendServicesRestInterceptor, "pre_set_security_policy" + transports.BackendServicesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetSecurityPolicyBackendServiceRequest.pb( - compute.SetSecurityPolicyBackendServiceRequest() + pb_message = compute.TestIamPermissionsBackendServiceRequest.pb( + compute.TestIamPermissionsBackendServiceRequest() ) transcode.return_value = { "method": "post", @@ -8509,17 +9261,19 @@ def 
test_set_security_policy_unary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) - request = compute.SetSecurityPolicyBackendServiceRequest() + request = compute.TestIamPermissionsBackendServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.TestPermissionsResponse() - client.set_security_policy_unary( + client.test_iam_permissions( request, metadata=[ ("key", "val"), @@ -8531,8 +9285,9 @@ def test_set_security_policy_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_security_policy_unary_rest_bad_request( - transport: str = "rest", request_type=compute.SetSecurityPolicyBackendServiceRequest +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsBackendServiceRequest, ): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8540,7 +9295,7 @@ def test_set_security_policy_unary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "backend_service": "sample2"} + request_init = {"project": "sample1", "resource": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8552,10 +9307,10 @@ def test_set_security_policy_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_security_policy_unary(request) + client.test_iam_permissions(request) -def test_set_security_policy_unary_rest_flattened(): +def test_test_iam_permissions_rest_flattened(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8564,17 +9319,17 @@ def test_set_security_policy_unary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation() + return_value = compute.TestPermissionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "backend_service": "sample2"} + sample_request = {"project": "sample1", "resource": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", - backend_service="backend_service_value", - security_policy_reference_resource=compute.SecurityPolicyReference( - security_policy="security_policy_value" + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] ), ) mock_args.update(sample_request) @@ -8583,25 +9338,25 @@ def test_set_security_policy_unary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_security_policy_unary(**mock_args) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy" + "%s/compute/v1/projects/{project}/global/backendServices/{resource}/testIamPermissions" % client.transport._host, args[1], ) -def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8610,17 +9365,17 @@ def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
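A minimal sketch of the client call these hunks cover, BackendServicesClient.test_iam_permissions, with placeholder project, resource, and permission names:

    from google.cloud import compute_v1

    client = compute_v1.BackendServicesClient()
    response = client.test_iam_permissions(
        project="my-project",
        resource="my-backend-service",
        test_permissions_request_resource=compute_v1.TestPermissionsRequest(
            permissions=["compute.backendServices.get"]
        ),
    )
    # response.permissions holds the subset of requested permissions the caller has.
    print(list(response.permissions))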
with pytest.raises(ValueError): - client.set_security_policy_unary( - compute.SetSecurityPolicyBackendServiceRequest(), + client.test_iam_permissions( + compute.TestIamPermissionsBackendServiceRequest(), project="project_value", - backend_service="backend_service_value", - security_policy_reference_resource=compute.SecurityPolicyReference( - security_policy="security_policy_value" + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] ), ) -def test_set_security_policy_unary_rest_error(): +def test_test_iam_permissions_rest_error(): client = BackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8784,6 +9539,12 @@ def test_update_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -8795,6 +9556,7 @@ def test_update_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -9343,6 +10105,12 @@ def test_update_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -9354,6 +10122,7 @@ def test_update_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
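The update hunks above add aws_v4_authentication (and used_by) to the BackendService payload; a minimal construction sketch, with placeholder credentials and region, assuming proto-plus dict marshalling for the nested message:

    from google.cloud import compute_v1

    backend_service = compute_v1.BackendService(
        name="my-backend-service",
        security_settings={
            "aws_v4_authentication": {
                "access_key_id": "EXAMPLEKEYID",  # placeholder
                "access_key": "example-secret",   # placeholder
                "access_key_version": "1",        # placeholder
                "origin_region": "us-east-1",     # placeholder
            }
        },
    )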
# Delete any fields which are not present in the current runtime dependency @@ -9843,10 +10612,12 @@ def test_backend_services_base_transport(): "get_iam_policy", "insert", "list", + "list_usable", "patch", "set_edge_security_policy", "set_iam_policy", "set_security_policy", + "test_iam_permissions", "update", ) for method in methods: @@ -10012,6 +10783,9 @@ def test_backend_services_client_transport_session_collision(transport_name): session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.list_usable._session + session2 = client2.transport.list_usable._session + assert session1 != session2 session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 @@ -10024,6 +10798,9 @@ def test_backend_services_client_transport_session_collision(transport_name): session1 = client1.transport.set_security_policy._session session2 = client2.transport.set_security_policy._session assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py index 6abe1f955051..e9c14c5dd40c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py @@ -628,6 +628,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -692,6 +693,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py index a0a1d0959c14..f8869c574016 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py @@ -1454,6 +1454,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1518,6 +1519,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -2548,6 +2550,7 @@ def test_create_snapshot_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": "type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -2568,6 +2571,7 @@ def test_create_snapshot_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", @@ -2998,6 +3002,7 @@ def 
test_create_snapshot_unary_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": "type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -3018,6 +3023,7 @@ def test_create_snapshot_unary_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 26028c279315..7904c5ec2ed0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -660,6 +660,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -724,6 +725,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py index 1e0921d9a06b..ec432abe92f9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py @@ -659,6 +659,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -723,6 +724,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py index 08152c25b4e9..d6dd569bdf27 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -1251,6 +1251,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value", creation_timestamp="creation_timestamp_value", description="description_value", fingerprint="fingerprint_value", @@ -1278,6 +1279,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. 
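Several aggregatedList hunks above add a serviceProjectNumber query parameter; a minimal sketch using the Disks client, with a placeholder project and a service project number assumed to be an integer field:

    from google.cloud import compute_v1

    client = compute_v1.DisksClient()
    request = compute_v1.AggregatedListDisksRequest(
        project="my-host-project",
        service_project_number=123456789,  # placeholder Shared VPC service project number
    )
    for zone, scoped_list in client.aggregated_list(request=request):
        for disk in scoped_list.disks:
            print(zone, disk.name)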
assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.byoip_api_version == "byoip_api_version_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" @@ -1553,6 +1555,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "fingerprint": "fingerprint_value", @@ -1915,7 +1918,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) mock_args.update(sample_request) @@ -1955,7 +1958,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertGlobalPublicDelegatedPrefixeRequest(), project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) @@ -1982,6 +1985,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "fingerprint": "fingerprint_value", @@ -2322,7 +2326,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) mock_args.update(sample_request) @@ -2362,7 +2366,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): compute.InsertGlobalPublicDelegatedPrefixeRequest(), project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) @@ -2743,6 +2747,7 @@ def test_patch_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "fingerprint": "fingerprint_value", @@ -3111,7 +3116,7 @@ def test_patch_rest_flattened(): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) mock_args.update(sample_request) @@ -3152,7 +3157,7 @@ def test_patch_rest_flattened_error(transport: str = "rest"): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) @@ -3179,6 +3184,7 @@ def test_patch_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} 
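The PublicDelegatedPrefix hunks above surface a byoip_api_version field; a minimal read sketch with placeholder project and prefix names:

    from google.cloud import compute_v1

    client = compute_v1.GlobalPublicDelegatedPrefixesClient()
    prefix = client.get(
        project="my-project",
        public_delegated_prefix="my-prefix",
    )
    print(prefix.byoip_api_version)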
request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "fingerprint": "fingerprint_value", @@ -3525,7 +3531,7 @@ def test_patch_unary_rest_flattened(): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) mock_args.update(sample_request) @@ -3566,7 +3572,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" + byoip_api_version="byoip_api_version_value" ), ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py index 4993fcdb630d..0609a564a980 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py index 9469e1ab1ba7..08ecab5a3e45 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -1532,6 +1532,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1596,6 +1597,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -2704,7 +2706,12 @@ def test_create_instances_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -3151,7 +3158,12 @@ def test_create_instances_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -6333,7 +6345,9 @@ def test_insert_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -6820,7 +6834,9 @@ def test_insert_unary_rest(request_type): 
"named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -8827,7 +8843,9 @@ def test_patch_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -9333,7 +9351,9 @@ def test_patch_unary_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -9783,7 +9803,12 @@ def test_patch_per_instance_configs_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -10236,7 +10261,12 @@ def test_patch_per_instance_configs_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -13963,7 +13993,12 @@ def test_update_per_instance_configs_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -14418,7 +14453,12 @@ def test_update_per_instance_configs_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py index e4c57930eb72..782a9106c43a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -1502,6 +1502,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1566,6 +1567,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py index 42e1a442a26d..ba887bf720cf 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -664,6 +664,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -728,6 +729,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -2281,6 +2283,7 @@ def test_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -2867,6 +2870,7 @@ def test_insert_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py index 88ed7ed1da96..275915dbff66 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py @@ -568,6 +568,7 @@ def test_add_access_config_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -1019,6 +1020,7 @@ def test_add_access_config_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -2349,6 +2351,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -2413,6 +2416,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -3665,6 +3669,7 @@ def test_bulk_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -4249,6 +4254,7 @@ def test_bulk_insert_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -9058,6 +9064,7 @@ def test_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -9095,7 +9102,17 @@ def test_insert_rest(request_type): "values": ["values_value1", "values_value2"], }, "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, + "resource_status": { + "physical_host": "physical_host_value", + "upcoming_maintenance": { + "can_reschedule": True, + 
"latest_window_start_time": "latest_window_start_time_value", + "maintenance_status": "maintenance_status_value", + "type_": "type__value", + "window_end_time": "window_end_time_value", + "window_start_time": "window_start_time_value", + }, + }, "satisfies_pzs": True, "scheduling": { "automatic_restart": True, @@ -9650,6 +9667,7 @@ def test_insert_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -9687,7 +9705,17 @@ def test_insert_unary_rest(request_type): "values": ["values_value1", "values_value2"], }, "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, + "resource_status": { + "physical_host": "physical_host_value", + "upcoming_maintenance": { + "can_reschedule": True, + "latest_window_start_time": "latest_window_start_time_value", + "maintenance_status": "maintenance_status_value", + "type_": "type__value", + "window_end_time": "window_end_time_value", + "window_start_time": "window_start_time_value", + }, + }, "satisfies_pzs": True, "scheduling": { "automatic_restart": True, @@ -20895,11 +20923,11 @@ def test_set_scheduling_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetServiceAccountInstanceRequest, + compute.SetSecurityPolicyInstanceRequest, dict, ], ) -def test_set_service_account_rest(request_type): +def test_set_security_policy_rest(request_type): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20907,17 +20935,20 @@ def test_set_service_account_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_service_account_request_resource"] = { - "email": "email_value", - "scopes": ["scopes_value1", "scopes_value2"], + request_init["instances_set_security_policy_request_resource"] = { + "network_interfaces": [ + "network_interfaces_value1", + "network_interfaces_value2", + ], + "security_policy": "security_policy_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ - "instances_set_service_account_request_resource" + test_field = compute.SetSecurityPolicyInstanceRequest.meta.fields[ + "instances_set_security_policy_request_resource" ] def get_message_fields(field): @@ -20947,7 +20978,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "instances_set_service_account_request_resource" + "instances_set_security_policy_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -20981,16 +21012,16 @@ def get_message_fields(field): for i in range( 0, len( - request_init["instances_set_service_account_request_resource"][ + request_init["instances_set_security_policy_request_resource"][ field ] ), ): - del request_init["instances_set_service_account_request_resource"][ + del request_init["instances_set_security_policy_request_resource"][ field ][i][subfield] else: - del request_init["instances_set_service_account_request_resource"][ + del request_init["instances_set_security_policy_request_resource"][ field ][subfield] request = request_type(**request_init) @@ -21032,7 +21063,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_service_account(request) + response = client.set_security_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, extended_operation.ExtendedOperation) @@ -21060,8 +21091,8 @@ def get_message_fields(field): assert response.zone == "zone_value" -def test_set_service_account_rest_required_fields( - request_type=compute.SetServiceAccountInstanceRequest, +def test_set_security_policy_rest_required_fields( + request_type=compute.SetSecurityPolicyInstanceRequest, ): transport_class = transports.InstancesRestTransport @@ -21083,7 +21114,7 @@ def test_set_service_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_service_account._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21094,7 +21125,7 @@ def test_set_service_account_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_service_account._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -21142,25 +21173,25 @@ def test_set_service_account_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_service_account(request) + response = client.set_security_policy(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_service_account_rest_unset_required_fields(): +def test_set_security_policy_rest_unset_required_fields(): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_service_account._get_unset_required_fields({}) + unset_fields = transport.set_security_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "instance", - "instancesSetServiceAccountRequestResource", + "instancesSetSecurityPolicyRequestResource", "project", "zone", ) @@ -21169,7 +21200,7 @@ def test_set_service_account_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_service_account_rest_interceptors(null_interceptor): +def test_set_security_policy_rest_interceptors(null_interceptor): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), @@ -21180,14 +21211,14 @@ def test_set_service_account_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.InstancesRestInterceptor, "post_set_service_account" + transports.InstancesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( - transports.InstancesRestInterceptor, "pre_set_service_account" + transports.InstancesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetServiceAccountInstanceRequest.pb( - compute.SetServiceAccountInstanceRequest() + pb_message = compute.SetSecurityPolicyInstanceRequest.pb( + compute.SetSecurityPolicyInstanceRequest() ) transcode.return_value = { "method": "post", @@ -21201,7 +21232,7 @@ def test_set_service_account_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.SetServiceAccountInstanceRequest() + request = compute.SetSecurityPolicyInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21209,7 +21240,7 @@ def test_set_service_account_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.set_service_account( + client.set_security_policy( request, metadata=[ ("key", "val"), @@ -21221,8 +21252,8 @@ def test_set_service_account_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_service_account_rest_bad_request( - transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest +def test_set_security_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyInstanceRequest ): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21242,10 +21273,10 @@ def test_set_service_account_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - 
client.set_service_account(request) + client.set_security_policy(request) -def test_set_service_account_rest_flattened(): +def test_set_security_policy_rest_flattened(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21268,8 +21299,8 @@ def test_set_service_account_rest_flattened(): project="project_value", zone="zone_value", instance="instance_value", - instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( - email="email_value" + instances_set_security_policy_request_resource=compute.InstancesSetSecurityPolicyRequest( + network_interfaces=["network_interfaces_value"] ), ) mock_args.update(sample_request) @@ -21283,20 +21314,20 @@ def test_set_service_account_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_service_account(**mock_args) + client.set_security_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" + "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setSecurityPolicy" % client.transport._host, args[1], ) -def test_set_service_account_rest_flattened_error(transport: str = "rest"): +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21305,18 +21336,18 @@ def test_set_service_account_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.set_service_account( - compute.SetServiceAccountInstanceRequest(), + client.set_security_policy( + compute.SetSecurityPolicyInstanceRequest(), project="project_value", zone="zone_value", instance="instance_value", - instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( - email="email_value" + instances_set_security_policy_request_resource=compute.InstancesSetSecurityPolicyRequest( + network_interfaces=["network_interfaces_value"] ), ) -def test_set_service_account_rest_error(): +def test_set_security_policy_rest_error(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21325,11 +21356,11 @@ def test_set_service_account_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetServiceAccountInstanceRequest, + compute.SetSecurityPolicyInstanceRequest, dict, ], ) -def test_set_service_account_unary_rest(request_type): +def test_set_security_policy_unary_rest(request_type): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21337,17 +21368,20 @@ def test_set_service_account_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["instances_set_service_account_request_resource"] = { - "email": "email_value", - "scopes": ["scopes_value1", "scopes_value2"], + request_init["instances_set_security_policy_request_resource"] = { + "network_interfaces": [ + "network_interfaces_value1", + "network_interfaces_value2", + ], + "security_policy": "security_policy_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ - "instances_set_service_account_request_resource" + test_field = compute.SetSecurityPolicyInstanceRequest.meta.fields[ + "instances_set_security_policy_request_resource" ] def get_message_fields(field): @@ -21377,7 +21411,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "instances_set_service_account_request_resource" + "instances_set_security_policy_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -21411,16 +21445,16 @@ def get_message_fields(field): for i in range( 0, len( - request_init["instances_set_service_account_request_resource"][ + request_init["instances_set_security_policy_request_resource"][ field ] ), ): - del request_init["instances_set_service_account_request_resource"][ + del request_init["instances_set_security_policy_request_resource"][ field ][i][subfield] else: - del request_init["instances_set_service_account_request_resource"][ + del request_init["instances_set_security_policy_request_resource"][ field ][subfield] request = request_type(**request_init) @@ -21462,14 +21496,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_service_account_unary(request) + response = client.set_security_policy_unary(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Operation) -def test_set_service_account_unary_rest_required_fields( - request_type=compute.SetServiceAccountInstanceRequest, +def test_set_security_policy_unary_rest_required_fields( + request_type=compute.SetSecurityPolicyInstanceRequest, ): transport_class = transports.InstancesRestTransport @@ -21491,7 +21525,7 @@ def test_set_service_account_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_service_account._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21502,7 +21536,7 @@ def test_set_service_account_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_service_account._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -21550,25 +21584,25 @@ def test_set_service_account_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_service_account_unary(request) + response = client.set_security_policy_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_service_account_unary_rest_unset_required_fields(): +def test_set_security_policy_unary_rest_unset_required_fields(): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_service_account._get_unset_required_fields({}) + unset_fields = transport.set_security_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "instance", - "instancesSetServiceAccountRequestResource", + "instancesSetSecurityPolicyRequestResource", "project", "zone", ) @@ -21577,7 +21611,7 @@ def test_set_service_account_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_service_account_unary_rest_interceptors(null_interceptor): +def test_set_security_policy_unary_rest_interceptors(null_interceptor): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), @@ -21588,14 +21622,14 @@ def test_set_service_account_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.InstancesRestInterceptor, "post_set_service_account" + transports.InstancesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( - transports.InstancesRestInterceptor, "pre_set_service_account" + transports.InstancesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetServiceAccountInstanceRequest.pb( - compute.SetServiceAccountInstanceRequest() + pb_message = compute.SetSecurityPolicyInstanceRequest.pb( + compute.SetSecurityPolicyInstanceRequest() ) transcode.return_value = { "method": "post", @@ -21609,7 +21643,7 @@ def test_set_service_account_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.SetServiceAccountInstanceRequest() + request = compute.SetSecurityPolicyInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21617,7 +21651,7 @@ def test_set_service_account_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.set_service_account_unary( + client.set_security_policy_unary( request, metadata=[ ("key", "val"), @@ -21629,8 +21663,8 @@ def test_set_service_account_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_service_account_unary_rest_bad_request( - transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest +def test_set_security_policy_unary_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyInstanceRequest ): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -21650,10 +21684,10 @@ def test_set_service_account_unary_rest_bad_request( 
response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_service_account_unary(request) + client.set_security_policy_unary(request) -def test_set_service_account_unary_rest_flattened(): +def test_set_security_policy_unary_rest_flattened(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21676,8 +21710,8 @@ def test_set_service_account_unary_rest_flattened(): project="project_value", zone="zone_value", instance="instance_value", - instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( - email="email_value" + instances_set_security_policy_request_resource=compute.InstancesSetSecurityPolicyRequest( + network_interfaces=["network_interfaces_value"] ), ) mock_args.update(sample_request) @@ -21691,20 +21725,20 @@ def test_set_service_account_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_service_account_unary(**mock_args) + client.set_security_policy_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" + "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setSecurityPolicy" % client.transport._host, args[1], ) -def test_set_service_account_unary_rest_flattened_error(transport: str = "rest"): +def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -21713,18 +21747,18 @@ def test_set_service_account_unary_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.set_service_account_unary( - compute.SetServiceAccountInstanceRequest(), + client.set_security_policy_unary( + compute.SetSecurityPolicyInstanceRequest(), project="project_value", zone="zone_value", instance="instance_value", - instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( - email="email_value" + instances_set_security_policy_request_resource=compute.InstancesSetSecurityPolicyRequest( + network_interfaces=["network_interfaces_value"] ), ) -def test_set_service_account_unary_rest_error(): +def test_set_security_policy_unary_rest_error(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -21733,11 +21767,11 @@ def test_set_service_account_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + compute.SetServiceAccountInstanceRequest, dict, ], ) -def test_set_shielded_instance_integrity_policy_rest(request_type): +def test_set_service_account_rest(request_type): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -21745,16 +21779,17 @@ def test_set_shielded_instance_integrity_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_integrity_policy_resource"] = { - "update_auto_learn_policy": True + request_init["instances_set_service_account_request_resource"] = { + "email": "email_value", + "scopes": ["scopes_value1", "scopes_value2"], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ - "shielded_instance_integrity_policy_resource" + test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ + "instances_set_service_account_request_resource" ] def get_message_fields(field): @@ -21784,7 +21819,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "shielded_instance_integrity_policy_resource" + "instances_set_service_account_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -21818,18 +21853,18 @@ def get_message_fields(field): for i in range( 0, len( - request_init["shielded_instance_integrity_policy_resource"][ + request_init["instances_set_service_account_request_resource"][ field ] ), ): - del request_init["shielded_instance_integrity_policy_resource"][ + del request_init["instances_set_service_account_request_resource"][ field ][i][subfield] else: - del request_init["shielded_instance_integrity_policy_resource"][field][ - subfield - ] + del request_init["instances_set_service_account_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -21869,7 +21904,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_shielded_instance_integrity_policy(request) + response = client.set_service_account(request) # Establish that the response is the type that we expect. assert isinstance(response, extended_operation.ExtendedOperation) @@ -21897,8 +21932,8 @@ def get_message_fields(field): assert response.zone == "zone_value" -def test_set_shielded_instance_integrity_policy_rest_required_fields( - request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +def test_set_service_account_rest_required_fields( + request_type=compute.SetServiceAccountInstanceRequest, ): transport_class = transports.InstancesRestTransport @@ -21920,9 +21955,7 @@ def test_set_shielded_instance_integrity_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_shielded_instance_integrity_policy._get_unset_required_fields( - jsonified_request - ) + ).set_service_account._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -21933,9 +21966,7 @@ def test_set_shielded_instance_integrity_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_shielded_instance_integrity_policy._get_unset_required_fields( - jsonified_request - ) + ).set_service_account._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -21967,7 +21998,7 @@ def test_set_shielded_instance_integrity_policy_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -21983,28 +22014,26 @@ def test_set_shielded_instance_integrity_policy_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_shielded_instance_integrity_policy(request) + response = client.set_service_account(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_shielded_instance_integrity_policy_rest_unset_required_fields(): +def test_set_service_account_rest_unset_required_fields(): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) - ) + unset_fields = transport.set_service_account._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "instance", + "instancesSetServiceAccountRequestResource", "project", - "shieldedInstanceIntegrityPolicyResource", "zone", ) ) @@ -22012,7 +22041,7 @@ def test_set_shielded_instance_integrity_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_shielded_instance_integrity_policy_rest_interceptors(null_interceptor): +def test_set_service_account_rest_interceptors(null_interceptor): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), @@ -22023,16 
+22052,14 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.InstancesRestInterceptor, - "post_set_shielded_instance_integrity_policy", + transports.InstancesRestInterceptor, "post_set_service_account" ) as post, mock.patch.object( - transports.InstancesRestInterceptor, - "pre_set_shielded_instance_integrity_policy", + transports.InstancesRestInterceptor, "pre_set_service_account" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb( - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + pb_message = compute.SetServiceAccountInstanceRequest.pb( + compute.SetServiceAccountInstanceRequest() ) transcode.return_value = { "method": "post", @@ -22046,7 +22073,7 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + request = compute.SetServiceAccountInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22054,7 +22081,7 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept pre.return_value = request, metadata post.return_value = compute.Operation() - client.set_shielded_instance_integrity_policy( + client.set_service_account( request, metadata=[ ("key", "val"), @@ -22066,9 +22093,8 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept post.assert_called_once() -def test_set_shielded_instance_integrity_policy_rest_bad_request( - transport: str = "rest", - request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +def test_set_service_account_rest_bad_request( + transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest ): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22088,10 +22114,10 @@ def test_set_shielded_instance_integrity_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_shielded_instance_integrity_policy(request) + client.set_service_account(request) -def test_set_shielded_instance_integrity_policy_rest_flattened(): +def test_set_service_account_rest_flattened(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22114,8 +22140,8 @@ def test_set_shielded_instance_integrity_policy_rest_flattened(): project="project_value", zone="zone_value", instance="instance_value", - shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy( - update_auto_learn_policy=True + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( + email="email_value" ), ) mock_args.update(sample_request) @@ -22129,22 +22155,20 @@ def test_set_shielded_instance_integrity_policy_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_shielded_instance_integrity_policy(**mock_args) + client.set_service_account(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy" + "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" % client.transport._host, args[1], ) -def test_set_shielded_instance_integrity_policy_rest_flattened_error( - transport: str = "rest", -): +def test_set_service_account_rest_flattened_error(transport: str = "rest"): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22153,18 +22177,18 @@ def test_set_shielded_instance_integrity_policy_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_shielded_instance_integrity_policy( - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(), + client.set_service_account( + compute.SetServiceAccountInstanceRequest(), project="project_value", zone="zone_value", instance="instance_value", - shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy( - update_auto_learn_policy=True + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( + email="email_value" ), ) -def test_set_shielded_instance_integrity_policy_rest_error(): +def test_set_service_account_rest_error(): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -22173,11 +22197,11 @@ def test_set_shielded_instance_integrity_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + compute.SetServiceAccountInstanceRequest, dict, ], ) -def test_set_shielded_instance_integrity_policy_unary_rest(request_type): +def test_set_service_account_unary_rest(request_type): client = InstancesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22185,16 +22209,17 @@ def test_set_shielded_instance_integrity_policy_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} - request_init["shielded_instance_integrity_policy_resource"] = { - "update_auto_learn_policy": True + request_init["instances_set_service_account_request_resource"] = { + "email": "email_value", + "scopes": ["scopes_value1", "scopes_value2"], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ - "shielded_instance_integrity_policy_resource" + test_field = compute.SetServiceAccountInstanceRequest.meta.fields[ + "instances_set_service_account_request_resource" ] def get_message_fields(field): @@ -22224,7 +22249,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "shielded_instance_integrity_policy_resource" + "instances_set_service_account_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -22258,18 +22283,18 @@ def get_message_fields(field): for i in range( 0, len( - request_init["shielded_instance_integrity_policy_resource"][ + request_init["instances_set_service_account_request_resource"][ field ] ), ): - del request_init["shielded_instance_integrity_policy_resource"][ + del request_init["instances_set_service_account_request_resource"][ field ][i][subfield] else: - del request_init["shielded_instance_integrity_policy_resource"][field][ - subfield - ] + del request_init["instances_set_service_account_request_resource"][ + field + ][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22309,14 +22334,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_shielded_instance_integrity_policy_unary(request) + response = client.set_service_account_unary(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Operation) -def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( - request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +def test_set_service_account_unary_rest_required_fields( + request_type=compute.SetServiceAccountInstanceRequest, ): transport_class = transports.InstancesRestTransport @@ -22338,9 +22363,7 @@ def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_shielded_instance_integrity_policy._get_unset_required_fields( - jsonified_request - ) + ).set_service_account._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -22351,9 +22374,7 @@ def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_shielded_instance_integrity_policy._get_unset_required_fields( - jsonified_request - ) + ).set_service_account._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -22385,7 +22406,7 @@ def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -22401,28 +22422,26 @@ def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_shielded_instance_integrity_policy_unary(request) + response = client.set_service_account_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_shielded_instance_integrity_policy_unary_rest_unset_required_fields(): +def test_set_service_account_unary_rest_unset_required_fields(): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) - ) + unset_fields = transport.set_service_account._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "instance", + "instancesSetServiceAccountRequestResource", "project", - "shieldedInstanceIntegrityPolicyResource", "zone", ) ) @@ -22430,9 +22449,7 @@ def test_set_shielded_instance_integrity_policy_unary_rest_unset_required_fields @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_shielded_instance_integrity_policy_unary_rest_interceptors( - null_interceptor, -): +def test_set_service_account_unary_rest_interceptors(null_interceptor): transport = transports.InstancesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), @@ -22443,16 +22460,14 @@ def test_set_shielded_instance_integrity_policy_unary_rest_interceptors( ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.InstancesRestInterceptor, - "post_set_shielded_instance_integrity_policy", + transports.InstancesRestInterceptor, "post_set_service_account" ) as post, mock.patch.object( - transports.InstancesRestInterceptor, - "pre_set_shielded_instance_integrity_policy", + transports.InstancesRestInterceptor, "pre_set_service_account" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb( - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + pb_message = compute.SetServiceAccountInstanceRequest.pb( + compute.SetServiceAccountInstanceRequest() ) transcode.return_value = { "method": "post", @@ -22466,7 +22481,864 @@ def test_set_shielded_instance_integrity_policy_unary_rest_interceptors( req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + request = compute.SetServiceAccountInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_service_account_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_service_account_unary_rest_bad_request( + 
transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_service_account_unary(request) + + +def test_set_service_account_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance="instance_value", + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( + email="email_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_service_account_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" + % client.transport._host, + args[1], + ) + + +def test_set_service_account_unary_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_service_account_unary( + compute.SetServiceAccountInstanceRequest(), + project="project_value", + zone="zone_value", + instance="instance_value", + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( + email="email_value" + ), + ) + + +def test_set_service_account_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + dict, + ], +) +def test_set_shielded_instance_integrity_policy_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["shielded_instance_integrity_policy_resource"] = { + "update_auto_learn_policy": True + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ + "shielded_instance_integrity_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_integrity_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if 
subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["shielded_instance_integrity_policy_resource"][ + field + ] + ), + ): + del request_init["shielded_instance_integrity_policy_resource"][ + field + ][i][subfield] + else: + del request_init["shielded_instance_integrity_policy_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_shielded_instance_integrity_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_set_shielded_instance_integrity_policy_rest_required_fields( + request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_shielded_instance_integrity_policy._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_shielded_instance_integrity_policy._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_shielded_instance_integrity_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_shielded_instance_integrity_policy_rest_unset_required_fields(): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instance", + "project", + "shieldedInstanceIntegrityPolicyResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_shielded_instance_integrity_policy_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstancesRestInterceptor, + "post_set_shielded_instance_integrity_policy", + ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "pre_set_shielded_instance_integrity_policy", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_shielded_instance_integrity_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_shielded_instance_integrity_policy_rest_bad_request( + transport: str = "rest", + request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_shielded_instance_integrity_policy(request) + + +def test_set_shielded_instance_integrity_policy_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance="instance_value", + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy( + update_auto_learn_policy=True + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_shielded_instance_integrity_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_shielded_instance_integrity_policy_rest_flattened_error( + transport: str = "rest", +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_shielded_instance_integrity_policy( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(), + project="project_value", + zone="zone_value", + instance="instance_value", + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy( + update_auto_learn_policy=True + ), + ) + + +def test_set_shielded_instance_integrity_policy_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + dict, + ], +) +def test_set_shielded_instance_integrity_policy_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["shielded_instance_integrity_policy_resource"] = { + "update_auto_learn_policy": True + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.meta.fields[ + "shielded_instance_integrity_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "shielded_instance_integrity_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") 
+ subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["shielded_instance_integrity_policy_resource"][ + field + ] + ), + ): + del request_init["shielded_instance_integrity_policy_resource"][ + field + ][i][subfield] + else: + del request_init["shielded_instance_integrity_policy_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_shielded_instance_integrity_policy_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( + request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_shielded_instance_integrity_policy._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_shielded_instance_integrity_policy._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_shielded_instance_integrity_policy_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_shielded_instance_integrity_policy_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instance", + "project", + "shieldedInstanceIntegrityPolicyResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_shielded_instance_integrity_policy_unary_rest_interceptors( + null_interceptor, +): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstancesRestInterceptor, + "post_set_shielded_instance_integrity_policy", + ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "pre_set_shielded_instance_integrity_policy", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -27384,6 +28256,7 @@ def test_update_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -27421,7 +28294,17 @@ def test_update_rest(request_type): "values": ["values_value1", "values_value2"], }, "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, + "resource_status": { + "physical_host": "physical_host_value", + "upcoming_maintenance": { + "can_reschedule": True, + "latest_window_start_time": "latest_window_start_time_value", + "maintenance_status": "maintenance_status_value", + "type_": "type__value", + "window_end_time": "window_end_time_value", + "window_start_time": "window_start_time_value", + }, + }, "satisfies_pzs": True, "scheduling": { "automatic_restart": True, @@ -27987,6 +28870,7 @@ def test_update_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -28024,7 +28908,17 @@ def test_update_unary_rest(request_type): "values": ["values_value1", "values_value2"], }, "resource_policies": ["resource_policies_value1", "resource_policies_value2"], - "resource_status": {"physical_host": "physical_host_value"}, + "resource_status": { + "physical_host": "physical_host_value", + "upcoming_maintenance": { + "can_reschedule": True, + "latest_window_start_time": "latest_window_start_time_value", + "maintenance_status": "maintenance_status_value", + "type_": "type__value", + "window_end_time": "window_end_time_value", + "window_start_time": "window_start_time_value", + }, + }, "satisfies_pzs": True, "scheduling": { "automatic_restart": True, @@ -28473,6 +29367,7 @@ def test_update_access_config_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -28924,6 +29819,7 @@ def test_update_access_config_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -30157,6 +31053,7 @@ def test_update_network_interface_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -30638,6 +31535,7 @@ def test_update_network_interface_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -32048,6 +32946,7 @@ def test_instances_base_transport(): "set_min_cpu_platform", "set_name", "set_scheduling", + 
"set_security_policy", "set_service_account", "set_shielded_instance_integrity_policy", "set_tags", @@ -32295,6 +33194,9 @@ def test_instances_client_transport_session_collision(transport_name): session1 = client1.transport.set_scheduling._session session2 = client2.transport.set_scheduling._session assert session1 != session2 + session1 = client1.transport.set_security_policy._session + session2 = client2.transport.set_security_policy._session + assert session1 != session2 session1 = client1.transport.set_service_account._session session2 = client2.transport.set_service_account._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index d203f0663900..7cfd1cbb3d6f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -676,6 +676,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -740,6 +741,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py index 32936ac9e380..82c5f601e9b4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -602,6 +602,8 @@ def test_get_rest(request_type): return_value = compute.InterconnectLocation( address="address_value", availability_zone="availability_zone_value", + available_features=["available_features_value"], + available_link_types=["available_link_types_value"], city="city_value", continent="continent_value", creation_timestamp="creation_timestamp_value", @@ -632,6 +634,8 @@ def test_get_rest(request_type): assert isinstance(response, compute.InterconnectLocation) assert response.address == "address_value" assert response.availability_zone == "availability_zone_value" + assert response.available_features == ["available_features_value"] + assert response.available_link_types == ["available_link_types_value"] assert response.city == "city_value" assert response.continent == "continent_value" assert response.creation_timestamp == "creation_timestamp_value" diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py index 67e4342f82c2..d0d9ee3a661b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py @@ -1210,6 +1210,7 @@ def test_get_rest(request_type): # Designate an appropriate value for the returned response. 
return_value = compute.Interconnect( admin_enabled=True, + available_features=["available_features_value"], creation_timestamp="creation_timestamp_value", customer_name="customer_name_value", description="description_value", @@ -1222,12 +1223,14 @@ def test_get_rest(request_type): label_fingerprint="label_fingerprint_value", link_type="link_type_value", location="location_value", + macsec_enabled=True, name="name_value", noc_contact_email="noc_contact_email_value", operational_status="operational_status_value", peer_ip_address="peer_ip_address_value", provisioned_link_count=2375, remote_location="remote_location_value", + requested_features=["requested_features_value"], requested_link_count=2151, satisfies_pzs=True, self_link="self_link_value", @@ -1248,6 +1251,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Interconnect) assert response.admin_enabled is True + assert response.available_features == ["available_features_value"] assert response.creation_timestamp == "creation_timestamp_value" assert response.customer_name == "customer_name_value" assert response.description == "description_value" @@ -1260,12 +1264,14 @@ def test_get_rest(request_type): assert response.label_fingerprint == "label_fingerprint_value" assert response.link_type == "link_type_value" assert response.location == "location_value" + assert response.macsec_enabled is True assert response.name == "name_value" assert response.noc_contact_email == "noc_contact_email_value" assert response.operational_status == "operational_status_value" assert response.peer_ip_address == "peer_ip_address_value" assert response.provisioned_link_count == 2375 assert response.remote_location == "remote_location_value" + assert response.requested_features == ["requested_features_value"] assert response.requested_link_count == 2151 assert response.satisfies_pzs is True assert response.self_link == "self_link_value" @@ -1793,6 +1799,293 @@ def test_get_diagnostics_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + compute.GetMacsecConfigInterconnectRequest, + dict, + ], +) +def test_get_macsec_config_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetMacsecConfigResponse( + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InterconnectsGetMacsecConfigResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_macsec_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InterconnectsGetMacsecConfigResponse) + assert response.etag == "etag_value" + + +def test_get_macsec_config_rest_required_fields( + request_type=compute.GetMacsecConfigInterconnectRequest, +): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_macsec_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = "interconnect_value" + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_macsec_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == "interconnect_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetMacsecConfigResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InterconnectsGetMacsecConfigResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_macsec_config(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_macsec_config_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_macsec_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "interconnect", + "project", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_macsec_config_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_get_macsec_config" + ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "pre_get_macsec_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetMacsecConfigInterconnectRequest.pb( + compute.GetMacsecConfigInterconnectRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + compute.InterconnectsGetMacsecConfigResponse.to_json( + compute.InterconnectsGetMacsecConfigResponse() + ) + ) + + request = compute.GetMacsecConfigInterconnectRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectsGetMacsecConfigResponse() + + client.get_macsec_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_macsec_config_rest_bad_request( + transport: str = "rest", request_type=compute.GetMacsecConfigInterconnectRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_macsec_config(request) + + +def test_get_macsec_config_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetMacsecConfigResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + interconnect="interconnect_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InterconnectsGetMacsecConfigResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_macsec_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/interconnects/{interconnect}/getMacsecConfig" + % client.transport._host, + args[1], + ) + + +def test_get_macsec_config_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_macsec_config( + compute.GetMacsecConfigInterconnectRequest(), + project="project_value", + interconnect="interconnect_value", + ) + + +def test_get_macsec_config_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1810,6 +2103,10 @@ def test_insert_rest(request_type): request_init = {"project": "sample1"} request_init["interconnect_resource"] = { "admin_enabled": True, + "available_features": [ + "available_features_value1", + "available_features_value2", + ], "circuit_infos": [ { "customer_demarc_id": "customer_demarc_id_value", @@ -1848,12 +2145,23 @@ def test_insert_rest(request_type): "labels": {}, "link_type": "link_type_value", "location": "location_value", + "macsec": { + "fail_open": True, + "pre_shared_keys": [ + {"name": "name_value", "start_time": "start_time_value"} + ], + }, + "macsec_enabled": True, "name": "name_value", "noc_contact_email": "noc_contact_email_value", "operational_status": "operational_status_value", "peer_ip_address": "peer_ip_address_value", "provisioned_link_count": 2375, "remote_location": "remote_location_value", + "requested_features": [ + "requested_features_value1", + "requested_features_value2", + ], "requested_link_count": 2151, "satisfies_pzs": True, "self_link": "self_link_value", @@ -2252,6 +2560,10 @@ def test_insert_unary_rest(request_type): request_init = {"project": "sample1"} request_init["interconnect_resource"] = { "admin_enabled": True, + "available_features": [ + "available_features_value1", + "available_features_value2", + ], "circuit_infos": [ { "customer_demarc_id": "customer_demarc_id_value", @@ -2290,12 +2602,23 @@ def test_insert_unary_rest(request_type): "labels": {}, "link_type": "link_type_value", "location": "location_value", + "macsec": { + "fail_open": True, + "pre_shared_keys": [ + {"name": "name_value", "start_time": "start_time_value"} + ], + }, + "macsec_enabled": True, "name": "name_value", "noc_contact_email": "noc_contact_email_value", "operational_status": "operational_status_value", "peer_ip_address": "peer_ip_address_value", "provisioned_link_count": 2375, "remote_location": "remote_location_value", + "requested_features": [ + "requested_features_value1", + "requested_features_value2", + ], "requested_link_count": 2151, "satisfies_pzs": True, "self_link": "self_link_value", @@ -3025,6 +3348,10 @@ def test_patch_rest(request_type): request_init = {"project": "sample1", "interconnect": "sample2"} request_init["interconnect_resource"] = { "admin_enabled": True, + "available_features": [ + "available_features_value1", + "available_features_value2", + ], "circuit_infos": [ { "customer_demarc_id": "customer_demarc_id_value", @@ -3063,12 +3390,23 @@ def test_patch_rest(request_type): "labels": {}, "link_type": "link_type_value", "location": "location_value", + "macsec": { + "fail_open": True, + "pre_shared_keys": [ + {"name": "name_value", "start_time": "start_time_value"} + ], + }, + "macsec_enabled": True, "name": "name_value", "noc_contact_email": "noc_contact_email_value", "operational_status": "operational_status_value", "peer_ip_address": "peer_ip_address_value", "provisioned_link_count": 2375, "remote_location": "remote_location_value", + "requested_features": [ + "requested_features_value1", + "requested_features_value2", + ], "requested_link_count": 2151, "satisfies_pzs": True, "self_link": "self_link_value", @@ -3474,6 +3812,10 @@ def 
test_patch_unary_rest(request_type): request_init = {"project": "sample1", "interconnect": "sample2"} request_init["interconnect_resource"] = { "admin_enabled": True, + "available_features": [ + "available_features_value1", + "available_features_value2", + ], "circuit_infos": [ { "customer_demarc_id": "customer_demarc_id_value", @@ -3512,12 +3854,23 @@ def test_patch_unary_rest(request_type): "labels": {}, "link_type": "link_type_value", "location": "location_value", + "macsec": { + "fail_open": True, + "pre_shared_keys": [ + {"name": "name_value", "start_time": "start_time_value"} + ], + }, + "macsec_enabled": True, "name": "name_value", "noc_contact_email": "noc_contact_email_value", "operational_status": "operational_status_value", "peer_ip_address": "peer_ip_address_value", "provisioned_link_count": 2375, "remote_location": "remote_location_value", + "requested_features": [ + "requested_features_value1", + "requested_features_value2", + ], "requested_link_count": 2151, "satisfies_pzs": True, "self_link": "self_link_value", @@ -4801,6 +5154,7 @@ def test_interconnects_base_transport(): "delete", "get", "get_diagnostics", + "get_macsec_config", "insert", "list", "patch", @@ -4951,6 +5305,9 @@ def test_interconnects_client_transport_session_collision(transport_name): session1 = client1.transport.get_diagnostics._session session2 = client2.transport.get_diagnostics._session assert session1 != session2 + session1 = client1.transport.get_macsec_config._session + session2 = client2.transport.get_macsec_config._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py index 7b7a2a85420d..5a43df8a1812 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py @@ -1893,6 +1893,7 @@ def test_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -2525,6 +2526,7 @@ def test_insert_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py index 08ac5cf42697..641dd4c4a12a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py @@ -630,6 +630,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -694,6 +695,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py index 4cae460b896e..c8b5260dc881 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -664,6 +664,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -728,6 +729,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -2265,6 +2267,7 @@ def test_insert_rest(request_type): "connection_endpoints": [ { "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "project_id_or_num": "project_id_or_num_value", "secondary_ip_cidr_ranges": [ "secondary_ip_cidr_ranges_value1", @@ -2272,6 +2275,7 @@ def test_insert_rest(request_type): ], "status": "status_value", "subnetwork": "subnetwork_value", + "subnetwork_cidr_range": "subnetwork_cidr_range_value", } ], "connection_preference": "connection_preference_value", @@ -2715,6 +2719,7 @@ def test_insert_unary_rest(request_type): "connection_endpoints": [ { "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", "project_id_or_num": "project_id_or_num_value", "secondary_ip_cidr_ranges": [ "secondary_ip_cidr_ranges_value1", @@ -2722,6 +2727,7 @@ def test_insert_unary_rest(request_type): ], "status": "status_value", "subnetwork": "subnetwork_value", + "subnetwork_cidr_range": "subnetwork_cidr_range_value", } ], "connection_preference": "connection_preference_value", @@ -3486,6 +3492,924 @@ def test_list_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchNetworkAttachmentRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + request_init["network_attachment_resource"] = { + "connection_endpoints": [ + { + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "project_id_or_num": "project_id_or_num_value", + "secondary_ip_cidr_ranges": [ + "secondary_ip_cidr_ranges_value1", + "secondary_ip_cidr_ranges_value2", + ], + "status": "status_value", + "subnetwork": "subnetwork_value", + "subnetwork_cidr_range": "subnetwork_cidr_range_value", + } + ], + "connection_preference": "connection_preference_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "network": "network_value", + "producer_accept_lists": [ + "producer_accept_lists_value1", + "producer_accept_lists_value2", + ], + "producer_reject_lists": [ + "producer_reject_lists_value1", + "producer_reject_lists_value2", + ], + "region": "region_value", + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "subnetworks": ["subnetworks_value1", "subnetworks_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkAttachmentRequest.meta.fields[ + "network_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_attachment_resource"][field]) + ): + del request_init["network_attachment_resource"][field][i][subfield] + else: + del request_init["network_attachment_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields(request_type=compute.PatchNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["network_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkAttachment"] = "network_attachment_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkAttachment" in jsonified_request + assert jsonified_request["networkAttachment"] == "network_attachment_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "networkAttachment", + "networkAttachmentResource", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkAttachmentRequest.pb( + compute.PatchNetworkAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = 
compute.PatchNetworkAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchNetworkAttachmentRequest +): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_attachment="network_attachment_value", + network_attachment_resource=compute.NetworkAttachment( + connection_endpoints=[ + compute.NetworkAttachmentConnectedEndpoint( + ip_address="ip_address_value" + ) + ] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchNetworkAttachmentRequest(), + project="project_value", + region="region_value", + network_attachment="network_attachment_value", + network_attachment_resource=compute.NetworkAttachment( + connection_endpoints=[ + compute.NetworkAttachmentConnectedEndpoint( + ip_address="ip_address_value" + ) + ] + ), + ) + + +def test_patch_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchNetworkAttachmentRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + request_init["network_attachment_resource"] = { + "connection_endpoints": [ + { + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "project_id_or_num": "project_id_or_num_value", + "secondary_ip_cidr_ranges": [ + "secondary_ip_cidr_ranges_value1", + "secondary_ip_cidr_ranges_value2", + ], + "status": "status_value", + "subnetwork": "subnetwork_value", + "subnetwork_cidr_range": "subnetwork_cidr_range_value", + } + ], + "connection_preference": "connection_preference_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "network": "network_value", + "producer_accept_lists": [ + "producer_accept_lists_value1", + "producer_accept_lists_value2", + ], + "producer_reject_lists": [ + "producer_reject_lists_value1", + "producer_reject_lists_value2", + ], + "region": "region_value", + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "subnetworks": ["subnetworks_value1", "subnetworks_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchNetworkAttachmentRequest.meta.fields[ + "network_attachment_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_attachment_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_attachment_resource"][field]) + ): + del request_init["network_attachment_resource"][field][i][subfield] + else: + del request_init["network_attachment_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchNetworkAttachmentRequest, +): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["network_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkAttachment"] = "network_attachment_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkAttachment" in jsonified_request + assert jsonified_request["networkAttachment"] == "network_attachment_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "networkAttachment", + "networkAttachmentResource", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkAttachmentRequest.pb( + compute.PatchNetworkAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchNetworkAttachmentRequest +): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_attachment="network_attachment_value", + network_attachment_resource=compute.NetworkAttachment( + connection_endpoints=[ + compute.NetworkAttachmentConnectedEndpoint( + ip_address="ip_address_value" + ) + ] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchNetworkAttachmentRequest(), + project="project_value", + region="region_value", + network_attachment="network_attachment_value", + network_attachment_resource=compute.NetworkAttachment( + connection_endpoints=[ + compute.NetworkAttachmentConnectedEndpoint( + ip_address="ip_address_value" + ) + ] + ), + ) + + +def test_patch_unary_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4443,6 +5367,7 @@ def test_network_attachments_base_transport(): "get_iam_policy", "insert", "list", + "patch", "set_iam_policy", "test_iam_permissions", ) @@ -4600,6 +5525,9 @@ def test_network_attachments_client_transport_session_collision(transport_name): session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 session1 = client1.transport.set_iam_policy._session session2 = client2.transport.set_iam_policy._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py index 6b5fbe1ad350..861f6d7add29 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -684,6 +684,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -750,6 +751,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index 6474bae92268..d26fad5cb6da 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -674,6 +674,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -738,6 +739,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py index 77a864a3e613..b5e6caa3ab2e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py @@ -1467,6 +1467,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1531,6 +1532,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & 
set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py index dcb67f59a31e..3e93c1083840 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py @@ -646,6 +646,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -710,6 +711,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py index e549155ac8fd..fec82bb9105a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py @@ -628,6 +628,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -692,6 +693,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index 084fb68aaf53..2bd1f8880756 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index 4b008cf7b4e8..ea800ba036c1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -595,11 +595,11 @@ def test_public_advertised_prefixes_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - compute.DeletePublicAdvertisedPrefixeRequest, + compute.AnnouncePublicAdvertisedPrefixeRequest, dict, ], ) -def test_delete_rest(request_type): +def test_announce_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -646,7 +646,7 @@ def test_delete_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.announce(request) # Establish that the response is the type that we expect. 
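[For orientation only, not part of the patch: the service_project_number field threaded through the aggregated_list required/unset-field tests above is a new request field in this revision. A minimal sketch of setting it, using NodeGroups purely as an example and placeholder values throughout:]

    from google.cloud import compute_v1

    def aggregated_list_with_service_project_sketch():
        client = compute_v1.NodeGroupsClient()

        # service_project_number is the int64 field this revision adds to the
        # *AggregatedList* requests; values here are hypothetical.
        request = compute_v1.AggregatedListNodeGroupsRequest(
            project="my-host-project",
            service_project_number=12345,
            return_partial_success=True,
        )

        # The pager yields (scope, scoped_list) pairs, e.g. "zones/us-central1-a".
        for zone, scoped_list in client.aggregated_list(request=request):
            for node_group in scoped_list.node_groups:
                print(zone, node_group.name)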
assert isinstance(response, extended_operation.ExtendedOperation) @@ -674,8 +674,8 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" -def test_delete_rest_required_fields( - request_type=compute.DeletePublicAdvertisedPrefixeRequest, +def test_announce_rest_required_fields( + request_type=compute.AnnouncePublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -696,7 +696,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -706,7 +706,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -738,7 +738,7 @@ def test_delete_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -753,19 +753,19 @@ def test_delete_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.announce(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_rest_unset_required_fields(): +def test_announce_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.announce._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -778,7 +778,7 @@ def test_delete_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_rest_interceptors(null_interceptor): +def test_announce_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -791,14 +791,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb( - compute.DeletePublicAdvertisedPrefixeRequest() + pb_message = compute.AnnouncePublicAdvertisedPrefixeRequest.pb( + compute.AnnouncePublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -812,7 +812,7 @@ def test_delete_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = 
compute.Operation.to_json(compute.Operation()) - request = compute.DeletePublicAdvertisedPrefixeRequest() + request = compute.AnnouncePublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -820,7 +820,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete( + client.announce( request, metadata=[ ("key", "val"), @@ -832,8 +832,8 @@ def test_delete_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_rest_bad_request( - transport: str = "rest", request_type=compute.DeletePublicAdvertisedPrefixeRequest +def test_announce_rest_bad_request( + transport: str = "rest", request_type=compute.AnnouncePublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -853,10 +853,10 @@ def test_delete_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete(request) + client.announce(request) -def test_delete_rest_flattened(): +def test_announce_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -886,20 +886,20 @@ def test_delete_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete(**mock_args) + client.announce(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/announce" % client.transport._host, args[1], ) -def test_delete_rest_flattened_error(transport: str = "rest"): +def test_announce_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -908,14 +908,14 @@ def test_delete_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete( - compute.DeletePublicAdvertisedPrefixeRequest(), + client.announce( + compute.AnnouncePublicAdvertisedPrefixeRequest(), project="project_value", public_advertised_prefix="public_advertised_prefix_value", ) -def test_delete_rest_error(): +def test_announce_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -924,11 +924,11 @@ def test_delete_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.DeletePublicAdvertisedPrefixeRequest, + compute.AnnouncePublicAdvertisedPrefixeRequest, dict, ], ) -def test_delete_unary_rest(request_type): +def test_announce_unary_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -975,14 +975,14 @@ def test_delete_unary_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.announce_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_delete_unary_rest_required_fields( - request_type=compute.DeletePublicAdvertisedPrefixeRequest, +def test_announce_unary_rest_required_fields( + request_type=compute.AnnouncePublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -1003,7 +1003,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1013,7 +1013,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -1045,7 +1045,7 @@ def test_delete_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1060,19 +1060,19 @@ def test_delete_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.announce_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_unary_rest_unset_required_fields(): +def test_announce_unary_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.announce._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -1085,7 +1085,7 @@ def test_delete_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_unary_rest_interceptors(null_interceptor): +def test_announce_unary_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1098,14 +1098,14 @@ def test_delete_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb( - compute.DeletePublicAdvertisedPrefixeRequest() + pb_message = compute.AnnouncePublicAdvertisedPrefixeRequest.pb( + compute.AnnouncePublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1119,7 +1119,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - 
request = compute.DeletePublicAdvertisedPrefixeRequest() + request = compute.AnnouncePublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1127,7 +1127,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete_unary( + client.announce_unary( request, metadata=[ ("key", "val"), @@ -1139,8 +1139,8 @@ def test_delete_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_unary_rest_bad_request( - transport: str = "rest", request_type=compute.DeletePublicAdvertisedPrefixeRequest +def test_announce_unary_rest_bad_request( + transport: str = "rest", request_type=compute.AnnouncePublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1160,10 +1160,10 @@ def test_delete_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_unary(request) + client.announce_unary(request) -def test_delete_unary_rest_flattened(): +def test_announce_unary_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1193,20 +1193,20 @@ def test_delete_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_unary(**mock_args) + client.announce_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/announce" % client.transport._host, args[1], ) -def test_delete_unary_rest_flattened_error(transport: str = "rest"): +def test_announce_unary_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1215,14 +1215,14 @@ def test_delete_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_unary( - compute.DeletePublicAdvertisedPrefixeRequest(), + client.announce_unary( + compute.AnnouncePublicAdvertisedPrefixeRequest(), project="project_value", public_advertised_prefix="public_advertised_prefix_value", ) -def test_delete_unary_rest_error(): +def test_announce_unary_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1231,11 +1231,11 @@ def test_delete_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.GetPublicAdvertisedPrefixeRequest, + compute.DeletePublicAdvertisedPrefixeRequest, dict, ], ) -def test_get_rest(request_type): +def test_delete_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1248,48 +1248,70 @@ def test_get_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
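[Usage sketch only, with assumed identifiers, not part of the generated tests: the announce method covered above is a POST to .../global/publicAdvertisedPrefixes/{public_advertised_prefix}/announce and takes the same flattened arguments the fixtures use.]

    from google.cloud import compute_v1

    def announce_public_advertised_prefix_sketch():
        client = compute_v1.PublicAdvertisedPrefixesClient()

        # announce() returns an extended operation; announce_unary() returns
        # the raw compute.Operation, mirroring the two test variants above.
        operation = client.announce(
            project="my-project",
            public_advertised_prefix="my-advertised-prefix",
        )
        operation.result()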
- return_value = compute.PublicAdvertisedPrefix( + return_value = compute.Operation( + client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", description="description_value", - dns_verification_ip="dns_verification_ip_value", - fingerprint="fingerprint_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, id=205, - ip_cidr_range="ip_cidr_range_value", + insert_time="insert_time_value", kind="kind_value", name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", self_link="self_link_value", - shared_secret="shared_secret_value", - status="status_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.PublicAdvertisedPrefix) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.dns_verification_ip == "dns_verification_ip_value" - assert response.fingerprint == "fingerprint_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 assert response.id == 205 - assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.shared_secret == "shared_secret_value" - assert response.status == "status_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_get_rest_required_fields( - request_type=compute.GetPublicAdvertisedPrefixeRequest, +def test_delete_rest_required_fields( + request_type=compute.DeletePublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -1310,7 +1332,7 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) 
# verify required fields with default values are now present @@ -1320,7 +1342,9 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1338,7 +1362,7 @@ def test_get_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.PublicAdvertisedPrefix() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1350,7 +1374,7 @@ def test_get_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1359,27 +1383,27 @@ def test_get_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_rest_unset_required_fields(): +def test_delete_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("requestId",)) & set( ( "project", @@ -1390,7 +1414,7 @@ def test_get_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_rest_interceptors(null_interceptor): +def test_delete_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1403,14 +1427,14 @@ def test_get_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_get" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_get" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.GetPublicAdvertisedPrefixeRequest.pb( - compute.GetPublicAdvertisedPrefixeRequest() + pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb( + compute.DeletePublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1422,19 +1446,17 @@ def test_get_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = compute.PublicAdvertisedPrefix.to_json( - compute.PublicAdvertisedPrefix() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.GetPublicAdvertisedPrefixeRequest() + request = compute.DeletePublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.PublicAdvertisedPrefix() + post.return_value = compute.Operation() - client.get( + client.delete( request, metadata=[ ("key", "val"), @@ -1446,8 +1468,8 @@ def test_get_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_rest_bad_request( - transport: str = "rest", request_type=compute.GetPublicAdvertisedPrefixeRequest +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1467,10 +1489,10 @@ def test_get_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get(request) + client.delete(request) -def test_get_rest_flattened(): +def test_delete_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1479,7 +1501,7 @@ def test_get_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.PublicAdvertisedPrefix() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} @@ -1495,12 +1517,12 @@ def test_get_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get(**mock_args) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -1513,7 +1535,7 @@ def test_get_rest_flattened(): ) -def test_get_rest_flattened_error(transport: str = "rest"): +def test_delete_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1522,14 +1544,14 @@ def test_get_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get( - compute.GetPublicAdvertisedPrefixeRequest(), + client.delete( + compute.DeletePublicAdvertisedPrefixeRequest(), project="project_value", public_advertised_prefix="public_advertised_prefix_value", ) -def test_get_rest_error(): +def test_delete_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1538,189 +1560,71 @@ def test_get_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertPublicAdvertisedPrefixeRequest, + compute.DeletePublicAdvertisedPrefixeRequest, dict, ], ) -def test_insert_rest(request_type): +def test_delete_unary_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ - "public_advertised_prefix_resource" - ] + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(**request_init) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_advertised_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_advertised_prefix_resource"][field]) - ): - del request_init["public_advertised_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_advertised_prefix_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.delete_unary(request) # Establish that the response is the type that we expect. - assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_insert_rest_required_fields( - request_type=compute.InsertPublicAdvertisedPrefixeRequest, +def test_delete_unary_rest_required_fields( + request_type=compute.DeletePublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport request_init = {} request_init["project"] = "" + request_init["public_advertised_prefix"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1735,16 +1639,17 @@ def test_insert_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now 
present jsonified_request["project"] = "project_value" + jsonified_request["publicAdvertisedPrefix"] = "public_advertised_prefix_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -1752,6 +1657,10 @@ def test_insert_rest_required_fields( # verify required fields with non-default values are left alone assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "publicAdvertisedPrefix" in jsonified_request + assert ( + jsonified_request["publicAdvertisedPrefix"] == "public_advertised_prefix_value" + ) client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1772,10 +1681,9 @@ def test_insert_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -1788,32 +1696,32 @@ def test_insert_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.delete_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_rest_unset_required_fields(): +def test_delete_unary_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", - "publicAdvertisedPrefixResource", + "publicAdvertisedPrefix", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_rest_interceptors(null_interceptor): +def test_delete_unary_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1826,14 +1734,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb( - compute.InsertPublicAdvertisedPrefixeRequest() + pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb( + compute.DeletePublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1847,7 +1755,7 @@ def test_insert_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.InsertPublicAdvertisedPrefixeRequest() + request = compute.DeletePublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), 
("cephalopod", "squid"), @@ -1855,7 +1763,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.insert( + client.delete_unary( request, metadata=[ ("key", "val"), @@ -1867,8 +1775,8 @@ def test_insert_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_rest_bad_request( - transport: str = "rest", request_type=compute.InsertPublicAdvertisedPrefixeRequest +def test_delete_unary_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1876,7 +1784,7 @@ def test_insert_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1"} + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1888,10 +1796,10 @@ def test_insert_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert(request) + client.delete_unary(request) -def test_insert_rest_flattened(): +def test_delete_unary_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1903,14 +1811,12 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1"} + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_advertised_prefix="public_advertised_prefix_value", ) mock_args.update(sample_request) @@ -1923,20 +1829,20 @@ def test_insert_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert(**mock_args) + client.delete_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1], ) -def test_insert_rest_flattened_error(transport: str = "rest"): +def test_delete_unary_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1945,16 +1851,14 @@ def test_insert_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.insert( - compute.InsertPublicAdvertisedPrefixeRequest(), + client.delete_unary( + compute.DeletePublicAdvertisedPrefixeRequest(), project="project_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_advertised_prefix="public_advertised_prefix_value", ) -def test_insert_rest_error(): +def test_delete_unary_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1963,167 +1867,75 @@ def test_insert_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertPublicAdvertisedPrefixeRequest, + compute.GetPublicAdvertisedPrefixeRequest, dict, ], ) -def test_insert_unary_rest(request_type): +def test_get_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ - "public_advertised_prefix_resource" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + dns_verification_ip="dns_verification_ip_value", + fingerprint="fingerprint_value", + id=205, + ip_cidr_range="ip_cidr_range_value", + kind="kind_value", + name="name_value", + pdp_scope="pdp_scope_value", + self_link="self_link_value", + shared_secret="shared_secret_value", + status="status_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_advertised_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_advertised_prefix_resource"][field]) - ): - del request_init["public_advertised_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_advertised_prefix_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.insert_unary(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.Operation) + assert isinstance(response, compute.PublicAdvertisedPrefix) + assert response.byoip_api_version == "byoip_api_version_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.dns_verification_ip == "dns_verification_ip_value" + assert response.fingerprint == "fingerprint_value" + assert response.id == 205 + assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.pdp_scope == "pdp_scope_value" + assert response.self_link == "self_link_value" + assert response.shared_secret == "shared_secret_value" + assert response.status == "status_value" -def test_insert_unary_rest_required_fields( - request_type=compute.InsertPublicAdvertisedPrefixeRequest, +def test_get_rest_required_fields( + request_type=compute.GetPublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport request_init = {} request_init["project"] = "" + request_init["public_advertised_prefix"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -2138,23 +1950,26 @@ def test_insert_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).get._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["project"] = "project_value" + jsonified_request["publicAdvertisedPrefix"] = "public_advertised_prefix_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).get._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "publicAdvertisedPrefix" in jsonified_request + assert ( + jsonified_request["publicAdvertisedPrefix"] == "public_advertised_prefix_value" + ) client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2163,7 +1978,7 @@ def test_insert_unary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.PublicAdvertisedPrefix() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2175,48 +1990,47 @@ def test_insert_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.PublicAdvertisedPrefix.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.get(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_unary_rest_unset_required_fields(): +def test_get_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.get._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(()) & set( ( "project", - "publicAdvertisedPrefixResource", + "publicAdvertisedPrefix", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_unary_rest_interceptors(null_interceptor): +def test_get_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2229,14 +2043,14 @@ def test_insert_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_get" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb( - compute.InsertPublicAdvertisedPrefixeRequest() + pb_message = compute.GetPublicAdvertisedPrefixeRequest.pb( + compute.GetPublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -2248,17 +2062,19 @@ def test_insert_unary_rest_interceptors(null_interceptor): req.return_value = Response() 
req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.PublicAdvertisedPrefix.to_json( + compute.PublicAdvertisedPrefix() + ) - request = compute.InsertPublicAdvertisedPrefixeRequest() + request = compute.GetPublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.PublicAdvertisedPrefix() - client.insert_unary( + client.get( request, metadata=[ ("key", "val"), @@ -2270,8 +2086,8 @@ def test_insert_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_unary_rest_bad_request( - transport: str = "rest", request_type=compute.InsertPublicAdvertisedPrefixeRequest +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetPublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2279,7 +2095,7 @@ def test_insert_unary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1"} + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2291,10 +2107,10 @@ def test_insert_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert_unary(request) + client.get(request) -def test_insert_unary_rest_flattened(): +def test_get_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2303,17 +2119,15 @@ def test_insert_unary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.PublicAdvertisedPrefix() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1"} + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_advertised_prefix="public_advertised_prefix_value", ) mock_args.update(sample_request) @@ -2321,25 +2135,25 @@ def test_insert_unary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.PublicAdvertisedPrefix.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert_unary(**mock_args) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1], ) -def test_insert_unary_rest_flattened_error(transport: str = "rest"): +def test_get_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2348,16 +2162,14 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.insert_unary( - compute.InsertPublicAdvertisedPrefixeRequest(), + client.get( + compute.GetPublicAdvertisedPrefixeRequest(), project="project_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_advertised_prefix="public_advertised_prefix_value", ) -def test_insert_unary_rest_error(): +def test_get_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2366,11 +2178,11 @@ def test_insert_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.ListPublicAdvertisedPrefixesRequest, + compute.InsertPublicAdvertisedPrefixeRequest, dict, ], ) -def test_list_rest(request_type): +def test_insert_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2378,39 +2190,174 @@ def test_list_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.PublicAdvertisedPrefixList( - id="id_value", - kind="kind_value", - next_page_token="next_page_token_value", - self_link="self_link_value", - ) + request_init["public_advertised_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "dns_verification_ip": "dns_verification_ip_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "kind": "kind_value", + "name": "name_value", + "pdp_scope": "pdp_scope_value", + "public_delegated_prefixs": [ + { + "ip_range": "ip_range_value", + "name": "name_value", + "project": "project_value", + "region": "region_value", + "status": "status_value", + } + ], + "self_link": "self_link_value", + "shared_secret": "shared_secret_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
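+    # (e.g. a runtime copy that predates this revision may not yet define newly added fields such as pdp_scope).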
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefixList.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list(request) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
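+        # insert() is a mutating call: the stubbed body is a compute.Operation, which the client is expected to surface as an ExtendedOperation (asserted below).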
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPager) - assert response.id == "id_value" + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" - assert response.next_page_token == "next_page_token_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_list_rest_required_fields( - request_type=compute.ListPublicAdvertisedPrefixesRequest, +def test_insert_rest_required_fields( + request_type=compute.InsertPublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -2430,7 +2377,7 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).insert._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2439,17 +2386,9 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).insert._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "max_results", - "order_by", - "page_token", - "return_partial_success", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2463,7 +2402,7 @@ def test_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.PublicAdvertisedPrefixList() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2475,50 +2414,48 @@ def test_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.insert(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rest_unset_required_fields(): +def test_insert_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list._get_unset_required_fields({}) + unset_fields = transport.insert._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("requestId",)) + & set( ( - "filter", - "maxResults", - "orderBy", - "pageToken", - "returnPartialSuccess", + "project", + "publicAdvertisedPrefixResource", ) ) - & set(("project",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_rest_interceptors(null_interceptor): +def test_insert_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2531,14 +2468,14 @@ def test_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_list" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_list" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.ListPublicAdvertisedPrefixesRequest.pb( - compute.ListPublicAdvertisedPrefixesRequest() + pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb( + compute.InsertPublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -2550,19 +2487,17 @@ def test_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.PublicAdvertisedPrefixList.to_json( - compute.PublicAdvertisedPrefixList() - ) + req.return_value._content 
= compute.Operation.to_json(compute.Operation()) - request = compute.ListPublicAdvertisedPrefixesRequest() + request = compute.InsertPublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.PublicAdvertisedPrefixList() + post.return_value = compute.Operation() - client.list( + client.insert( request, metadata=[ ("key", "val"), @@ -2574,8 +2509,8 @@ def test_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_rest_bad_request( - transport: str = "rest", request_type=compute.ListPublicAdvertisedPrefixesRequest +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2595,10 +2530,10 @@ def test_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list(request) + client.insert(request) -def test_list_rest_flattened(): +def test_insert_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2607,7 +2542,7 @@ def test_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.PublicAdvertisedPrefixList() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method sample_request = {"project": "sample1"} @@ -2615,6 +2550,9 @@ def test_list_rest_flattened(): # get truthy value for each flattened field mock_args = dict( project="project_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), ) mock_args.update(sample_request) @@ -2622,12 +2560,12 @@ def test_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list(**mock_args) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -2640,7 +2578,7 @@ def test_list_rest_flattened(): ) -def test_list_rest_flattened_error(transport: str = "rest"): +def test_insert_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2649,91 +2587,38 @@ def test_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list( - compute.ListPublicAdvertisedPrefixesRequest(), + client.insert( + compute.InsertPublicAdvertisedPrefixeRequest(), project="project_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), ) -def test_list_rest_pager(transport: str = "rest"): +def test_insert_rest_error(): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - compute.PublicAdvertisedPrefixList( - items=[ - compute.PublicAdvertisedPrefix(), - compute.PublicAdvertisedPrefix(), - compute.PublicAdvertisedPrefix(), - ], - next_page_token="abc", - ), - compute.PublicAdvertisedPrefixList( - items=[], - next_page_token="def", - ), - compute.PublicAdvertisedPrefixList( - items=[ - compute.PublicAdvertisedPrefix(), - ], - next_page_token="ghi", - ), - compute.PublicAdvertisedPrefixList( - items=[ - compute.PublicAdvertisedPrefix(), - compute.PublicAdvertisedPrefix(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - compute.PublicAdvertisedPrefixList.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"project": "sample1"} - - pager = client.list(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, compute.PublicAdvertisedPrefix) for i in results) - - pages = list(client.list(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - compute.PatchPublicAdvertisedPrefixeRequest, + compute.InsertPublicAdvertisedPrefixeRequest, dict, ], ) -def test_patch_rest(request_type): +def test_insert_unary_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request_init = {"project": "sample1"} request_init["public_advertised_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "dns_verification_ip": "dns_verification_ip_value", @@ -2742,6 +2627,7 @@ def test_patch_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "kind": "kind_value", "name": "name_value", + "pdp_scope": "pdp_scope_value", "public_delegated_prefixs": [ { "ip_range": "ip_range_value", @@ -2760,7 +2646,7 @@ def test_patch_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ + test_field = 
compute.InsertPublicAdvertisedPrefixeRequest.meta.fields[ "public_advertised_prefix_resource" ] @@ -2770,66 +2656,1577 @@ def get_message_fields(field): # If the field is not a composite type, return an empty list. message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
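+        # The *_unary variant returns the raw compute.Operation rather than an ExtendedOperation (see the isinstance assertion below).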
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertPublicAdvertisedPrefixeRequest, +): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicAdvertisedPrefixResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb( + compute.InsertPublicAdvertisedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicAdvertisedPrefixeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
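+    # A stubbed 400 status is enough for the REST transport to raise core_exceptions.BadRequest.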
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertPublicAdvertisedPrefixeRequest(), + project="project_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + + +def test_insert_unary_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListPublicAdvertisedPrefixesRequest, + dict, + ], +) +def test_list_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
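+        # list() wraps this PublicAdvertisedPrefixList in a ListPager, but the raw page fields remain readable (asserted below).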
+ return_value = compute.PublicAdvertisedPrefixList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +def test_list_rest_required_fields( + request_type=compute.ListPublicAdvertisedPrefixesRequest, +): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefixList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListPublicAdvertisedPrefixesRequest.pb( + compute.ListPublicAdvertisedPrefixesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicAdvertisedPrefixList.to_json( + compute.PublicAdvertisedPrefixList() + ) + + request = compute.ListPublicAdvertisedPrefixesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicAdvertisedPrefixList() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListPublicAdvertisedPrefixesRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefixList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListPublicAdvertisedPrefixesRequest(), + project="project_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + ], + next_page_token="abc", + ), + compute.PublicAdvertisedPrefixList( + items=[], + next_page_token="def", + ), + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + ], + next_page_token="ghi", + ), + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.PublicAdvertisedPrefixList.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PublicAdvertisedPrefix) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchPublicAdvertisedPrefixeRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request_init["public_advertised_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "dns_verification_ip": "dns_verification_ip_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "kind": "kind_value", + "name": "name_value", + "pdp_scope": "pdp_scope_value", + "public_delegated_prefixs": [ + { + "ip_range": "ip_range_value", + "name": "name_value", + "project": "project_value", + "region": "region_value", + "status": "status_value", + } + ], + "self_link": "self_link_value", + "shared_secret": "shared_secret_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields( + request_type=compute.PatchPublicAdvertisedPrefixeRequest, +): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["publicAdvertisedPrefix"] = "public_advertised_prefix_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
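+    # Only request_id, an optional query parameter, may legitimately remain unset at this point.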
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "publicAdvertisedPrefix" in jsonified_request + assert ( + jsonified_request["publicAdvertisedPrefix"] == "public_advertised_prefix_value" + ) + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicAdvertisedPrefix", + "publicAdvertisedPrefixResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb( + compute.PatchPublicAdvertisedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicAdvertisedPrefixeRequest() + 
metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchPublicAdvertisedPrefixeRequest(), + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + + +def test_patch_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchPublicAdvertisedPrefixeRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request_init["public_advertised_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "dns_verification_ip": "dns_verification_ip_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "kind": "kind_value", + "name": "name_value", + "pdp_scope": "pdp_scope_value", + "public_delegated_prefixs": [ + { + "ip_range": "ip_range_value", + "name": "name_value", + "project": "project_value", + "region": "region_value", + "status": "status_value", + } + ], + "self_link": "self_link_value", + "shared_secret": "shared_secret_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ + "public_advertised_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_advertised_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_advertised_prefix_resource"][field]) + ): + del request_init["public_advertised_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_advertised_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchPublicAdvertisedPrefixeRequest, +): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["publicAdvertisedPrefix"] = "public_advertised_prefix_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "publicAdvertisedPrefix" in jsonified_request + assert ( + jsonified_request["publicAdvertisedPrefix"] == "public_advertised_prefix_value" + ) + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicAdvertisedPrefix", + "publicAdvertisedPrefixResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb( + compute.PatchPublicAdvertisedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicAdvertisedPrefixeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + % client.transport._host, + args[1], + ) + - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchPublicAdvertisedPrefixeRequest(), + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_advertised_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_patch_unary_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_advertised_prefix_resource"][field]) - ): - del request_init["public_advertised_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_advertised_prefix_resource"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + compute.WithdrawPublicAdvertisedPrefixeRequest, + dict, + ], +) +def test_withdraw_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2869,7 +4266,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.withdraw(request) # Establish that the response is the type that we expect. 
assert isinstance(response, extended_operation.ExtendedOperation) @@ -2897,8 +4294,8 @@ def get_message_fields(field): assert response.zone == "zone_value" -def test_patch_rest_required_fields( - request_type=compute.PatchPublicAdvertisedPrefixeRequest, +def test_withdraw_rest_required_fields( + request_type=compute.WithdrawPublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -2919,7 +4316,7 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2929,7 +4326,7 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -2961,10 +4358,9 @@ def test_patch_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2977,33 +4373,32 @@ def test_patch_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.withdraw(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_rest_unset_required_fields(): +def test_withdraw_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.withdraw._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", "publicAdvertisedPrefix", - "publicAdvertisedPrefixResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_rest_interceptors(null_interceptor): +def test_withdraw_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3016,14 +4411,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb( - compute.PatchPublicAdvertisedPrefixeRequest() + pb_message = compute.WithdrawPublicAdvertisedPrefixeRequest.pb( + compute.WithdrawPublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -3037,7 +4432,7 @@ def test_patch_rest_interceptors(null_interceptor): req.return_value.request = 
PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchPublicAdvertisedPrefixeRequest() + request = compute.WithdrawPublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3045,7 +4440,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch( + client.withdraw( request, metadata=[ ("key", "val"), @@ -3057,8 +4452,8 @@ def test_patch_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_rest_bad_request( - transport: str = "rest", request_type=compute.PatchPublicAdvertisedPrefixeRequest +def test_withdraw_rest_bad_request( + transport: str = "rest", request_type=compute.WithdrawPublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3078,10 +4473,10 @@ def test_patch_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch(request) + client.withdraw(request) -def test_patch_rest_flattened(): +def test_withdraw_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3099,9 +4494,6 @@ def test_patch_rest_flattened(): mock_args = dict( project="project_value", public_advertised_prefix="public_advertised_prefix_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) mock_args.update(sample_request) @@ -3114,20 +4506,20 @@ def test_patch_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch(**mock_args) + client.withdraw(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/withdraw" % client.transport._host, args[1], ) -def test_patch_rest_flattened_error(transport: str = "rest"): +def test_withdraw_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3136,17 +4528,14 @@ def test_patch_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.patch( - compute.PatchPublicAdvertisedPrefixeRequest(), + client.withdraw( + compute.WithdrawPublicAdvertisedPrefixeRequest(), project="project_value", public_advertised_prefix="public_advertised_prefix_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) -def test_patch_rest_error(): +def test_withdraw_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3155,11 +4544,11 @@ def test_patch_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.PatchPublicAdvertisedPrefixeRequest, + compute.WithdrawPublicAdvertisedPrefixeRequest, dict, ], ) -def test_patch_unary_rest(request_type): +def test_withdraw_unary_rest(request_type): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3167,103 +4556,6 @@ def test_patch_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} - request_init["public_advertised_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "dns_verification_ip": "dns_verification_ip_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "kind": "kind_value", - "name": "name_value", - "public_delegated_prefixs": [ - { - "ip_range": "ip_range_value", - "name": "name_value", - "project": "project_value", - "region": "region_value", - "status": "status_value", - } - ], - "self_link": "self_link_value", - "shared_secret": "shared_secret_value", - "status": "status_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchPublicAdvertisedPrefixeRequest.meta.fields[ - "public_advertised_prefix_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_advertised_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_advertised_prefix_resource"][field]) - ): - del request_init["public_advertised_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_advertised_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3303,14 +4595,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.withdraw_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_patch_unary_rest_required_fields( - request_type=compute.PatchPublicAdvertisedPrefixeRequest, +def test_withdraw_unary_rest_required_fields( + request_type=compute.WithdrawPublicAdvertisedPrefixeRequest, ): transport_class = transports.PublicAdvertisedPrefixesRestTransport @@ -3331,7 +4623,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3341,7 +4633,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -3373,10 +4665,9 @@ def test_patch_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3389,33 +4680,32 @@ def test_patch_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.withdraw_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_unary_rest_unset_required_fields(): +def test_withdraw_unary_rest_unset_required_fields(): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.withdraw._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", "publicAdvertisedPrefix", - "publicAdvertisedPrefixResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): +def test_withdraw_unary_rest_interceptors(null_interceptor): transport = transports.PublicAdvertisedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3428,14 +4718,14 @@ def test_patch_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch" + transports.PublicAdvertisedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( - transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch" + transports.PublicAdvertisedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb( - compute.PatchPublicAdvertisedPrefixeRequest() + pb_message = compute.WithdrawPublicAdvertisedPrefixeRequest.pb( + compute.WithdrawPublicAdvertisedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -3449,7 +4739,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() 
req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchPublicAdvertisedPrefixeRequest() + request = compute.WithdrawPublicAdvertisedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3457,7 +4747,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch_unary( + client.withdraw_unary( request, metadata=[ ("key", "val"), @@ -3469,8 +4759,8 @@ def test_patch_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchPublicAdvertisedPrefixeRequest +def test_withdraw_unary_rest_bad_request( + transport: str = "rest", request_type=compute.WithdrawPublicAdvertisedPrefixeRequest ): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3490,10 +4780,10 @@ def test_patch_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch_unary(request) + client.withdraw_unary(request) -def test_patch_unary_rest_flattened(): +def test_withdraw_unary_rest_flattened(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3511,9 +4801,6 @@ def test_patch_unary_rest_flattened(): mock_args = dict( project="project_value", public_advertised_prefix="public_advertised_prefix_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) mock_args.update(sample_request) @@ -3526,20 +4813,20 @@ def test_patch_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch_unary(**mock_args) + client.withdraw_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + "%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}/withdraw" % client.transport._host, args[1], ) -def test_patch_unary_rest_flattened_error(transport: str = "rest"): +def test_withdraw_unary_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3548,17 +4835,14 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.patch_unary( - compute.PatchPublicAdvertisedPrefixeRequest(), + client.withdraw_unary( + compute.WithdrawPublicAdvertisedPrefixeRequest(), project="project_value", public_advertised_prefix="public_advertised_prefix_value", - public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) -def test_patch_unary_rest_error(): +def test_withdraw_unary_rest_error(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3674,11 +4958,13 @@ def test_public_advertised_prefixes_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( + "announce", "delete", "get", "insert", "list", "patch", + "withdraw", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3816,6 +5102,9 @@ def test_public_advertised_prefixes_client_transport_session_collision(transport credentials=creds2, transport=transport_name, ) + session1 = client1.transport.announce._session + session2 = client2.transport.announce._session + assert session1 != session2 session1 = client1.transport.delete._session session2 = client2.transport.delete._session assert session1 != session2 @@ -3831,6 +5120,9 @@ def test_public_advertised_prefixes_client_transport_session_collision(transport session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 + session1 = client1.transport.withdraw._session + session2 = client2.transport.withdraw._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index 510a69d00d66..0ca508481790 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -676,6 +676,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -740,6 +741,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -963,11 +965,11 @@ def test_aggregated_list_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - compute.DeletePublicDelegatedPrefixeRequest, + compute.AnnouncePublicDelegatedPrefixeRequest, dict, ], ) -def test_delete_rest(request_type): +def test_announce_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1018,7 +1020,7 @@ def test_delete_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.announce(request) # Establish that the response is the type that we expect. assert isinstance(response, extended_operation.ExtendedOperation) @@ -1046,8 +1048,8 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" -def test_delete_rest_required_fields( - request_type=compute.DeletePublicDelegatedPrefixeRequest, +def test_announce_rest_required_fields( + request_type=compute.AnnouncePublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -1069,7 +1071,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1080,7 +1082,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -1112,7 +1114,7 @@ def test_delete_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1127,19 +1129,19 @@ def test_delete_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.announce(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_rest_unset_required_fields(): +def test_announce_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.announce._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -1153,7 +1155,7 @@ def test_delete_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_rest_interceptors(null_interceptor): +def test_announce_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1166,14 +1168,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_delete" + transports.PublicDelegatedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb( - compute.DeletePublicDelegatedPrefixeRequest() + pb_message = compute.AnnouncePublicDelegatedPrefixeRequest.pb( + compute.AnnouncePublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1187,7 +1189,7 @@ def test_delete_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeletePublicDelegatedPrefixeRequest() + request = compute.AnnouncePublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1195,7 +1197,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete( + client.announce( request, metadata=[ ("key", "val"), @@ -1207,8 +1209,8 @@ def test_delete_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_rest_bad_request( - transport: str = "rest", request_type=compute.DeletePublicDelegatedPrefixeRequest +def test_announce_rest_bad_request( + transport: str = "rest", request_type=compute.AnnouncePublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1232,10 +1234,10 @@ def test_delete_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete(request) + client.announce(request) -def test_delete_rest_flattened(): +def 
test_announce_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1270,20 +1272,20 @@ def test_delete_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete(**mock_args) + client.announce(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/announce" % client.transport._host, args[1], ) -def test_delete_rest_flattened_error(transport: str = "rest"): +def test_announce_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1292,15 +1294,15 @@ def test_delete_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete( - compute.DeletePublicDelegatedPrefixeRequest(), + client.announce( + compute.AnnouncePublicDelegatedPrefixeRequest(), project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", ) -def test_delete_rest_error(): +def test_announce_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1309,11 +1311,11 @@ def test_delete_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.DeletePublicDelegatedPrefixeRequest, + compute.AnnouncePublicDelegatedPrefixeRequest, dict, ], ) -def test_delete_unary_rest(request_type): +def test_announce_unary_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1364,14 +1366,14 @@ def test_delete_unary_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.announce_unary(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Operation) -def test_delete_unary_rest_required_fields( - request_type=compute.DeletePublicDelegatedPrefixeRequest, +def test_announce_unary_rest_required_fields( + request_type=compute.AnnouncePublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -1393,7 +1395,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1404,7 +1406,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).announce._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -1436,7 +1438,7 @@ def test_delete_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1451,19 +1453,19 @@ def test_delete_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.announce_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_unary_rest_unset_required_fields(): +def test_announce_unary_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.announce._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -1477,7 +1479,7 @@ def test_delete_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_unary_rest_interceptors(null_interceptor): +def test_announce_unary_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1490,14 +1492,14 @@ def test_delete_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_delete" + transports.PublicDelegatedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb( - compute.DeletePublicDelegatedPrefixeRequest() + pb_message = compute.AnnouncePublicDelegatedPrefixeRequest.pb( + compute.AnnouncePublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1511,7 +1513,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeletePublicDelegatedPrefixeRequest() + request = compute.AnnouncePublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1519,7 +1521,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete_unary( + client.announce_unary( request, metadata=[ ("key", "val"), @@ -1531,8 +1533,8 @@ def test_delete_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_unary_rest_bad_request( - transport: str = "rest", request_type=compute.DeletePublicDelegatedPrefixeRequest +def test_announce_unary_rest_bad_request( + transport: str = "rest", request_type=compute.AnnouncePublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1556,10 +1558,10 @@ def test_delete_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - 
client.delete_unary(request) + client.announce_unary(request) -def test_delete_unary_rest_flattened(): +def test_announce_unary_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1594,20 +1596,20 @@ def test_delete_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_unary(**mock_args) + client.announce_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/announce" % client.transport._host, args[1], ) -def test_delete_unary_rest_flattened_error(transport: str = "rest"): +def test_announce_unary_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1616,15 +1618,15 @@ def test_delete_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_unary( - compute.DeletePublicDelegatedPrefixeRequest(), + client.announce_unary( + compute.AnnouncePublicDelegatedPrefixeRequest(), project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", ) -def test_delete_unary_rest_error(): +def test_announce_unary_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1633,11 +1635,11 @@ def test_delete_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.GetPublicDelegatedPrefixeRequest, + compute.DeletePublicDelegatedPrefixeRequest, dict, ], ) -def test_get_rest(request_type): +def test_delete_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1654,50 +1656,70 @@ def test_get_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.PublicDelegatedPrefix( + return_value = compute.Operation( + client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", description="description_value", - fingerprint="fingerprint_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, id=205, - ip_cidr_range="ip_cidr_range_value", - is_live_migration=True, + insert_time="insert_time_value", kind="kind_value", name="name_value", - parent_prefix="parent_prefix_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, region="region_value", self_link="self_link_value", - status="status_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.PublicDelegatedPrefix) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.fingerprint == "fingerprint_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 assert response.id == 205 - assert response.ip_cidr_range == "ip_cidr_range_value" - assert response.is_live_migration is True + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" assert response.name == "name_value" - assert response.parent_prefix == "parent_prefix_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.status == "status_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_get_rest_required_fields( - request_type=compute.GetPublicDelegatedPrefixeRequest, +def test_delete_rest_required_fields( + request_type=compute.DeletePublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -1719,7 +1741,7 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now 
present @@ -1730,7 +1752,9 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1748,7 +1772,7 @@ def test_get_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.PublicDelegatedPrefix() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1760,7 +1784,7 @@ def test_get_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1769,27 +1793,27 @@ def test_get_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_rest_unset_required_fields(): +def test_delete_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("requestId",)) & set( ( "project", @@ -1801,7 +1825,7 @@ def test_get_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_rest_interceptors(null_interceptor): +def test_delete_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1814,14 +1838,14 @@ def test_get_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_get" + transports.PublicDelegatedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_get" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.GetPublicDelegatedPrefixeRequest.pb( - compute.GetPublicDelegatedPrefixeRequest() + pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb( + compute.DeletePublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -1833,19 +1857,17 @@ def test_get_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.PublicDelegatedPrefix.to_json( - 
compute.PublicDelegatedPrefix() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.GetPublicDelegatedPrefixeRequest() + request = compute.DeletePublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.PublicDelegatedPrefix() + post.return_value = compute.Operation() - client.get( + client.delete( request, metadata=[ ("key", "val"), @@ -1857,8 +1879,8 @@ def test_get_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_rest_bad_request( - transport: str = "rest", request_type=compute.GetPublicDelegatedPrefixeRequest +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1882,10 +1904,10 @@ def test_get_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get(request) + client.delete(request) -def test_get_rest_flattened(): +def test_delete_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1894,7 +1916,7 @@ def test_get_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.PublicDelegatedPrefix() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method sample_request = { @@ -1915,12 +1937,12 @@ def test_get_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefix.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get(**mock_args) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -1933,7 +1955,7 @@ def test_get_rest_flattened(): ) -def test_get_rest_flattened_error(transport: str = "rest"): +def test_delete_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1942,15 +1964,15 @@ def test_get_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get( - compute.GetPublicDelegatedPrefixeRequest(), + client.delete( + compute.DeletePublicDelegatedPrefixeRequest(), project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", ) -def test_get_rest_error(): +def test_delete_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1959,192 +1981,75 @@ def test_get_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertPublicDelegatedPrefixeRequest, + compute.DeletePublicDelegatedPrefixeRequest, dict, ], ) -def test_insert_rest(request_type): +def test_delete_unary_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ - "public_delegated_prefix_resource" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_delegated_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_delegated_prefix_resource"][field]) - ): - del request_init["public_delegated_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_delegated_prefix_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.delete_unary(request) # Establish that the response is the type that we expect. - assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_insert_rest_required_fields( - request_type=compute.InsertPublicDelegatedPrefixeRequest, +def test_delete_unary_rest_required_fields( + request_type=compute.DeletePublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport request_init = {} request_init["project"] = "" + request_init["public_delegated_prefix"] = "" request_init["region"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -2160,17 +2065,18 @@ def test_insert_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present 
jsonified_request["project"] = "project_value" + jsonified_request["publicDelegatedPrefix"] = "public_delegated_prefix_value" jsonified_request["region"] = "region_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -2178,6 +2084,8 @@ def test_insert_rest_required_fields( # verify required fields with non-default values are left alone assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == "public_delegated_prefix_value" assert "region" in jsonified_request assert jsonified_request["region"] == "region_value" @@ -2200,10 +2108,9 @@ def test_insert_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2216,25 +2123,25 @@ def test_insert_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.delete_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_rest_unset_required_fields(): +def test_delete_unary_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", - "publicDelegatedPrefixResource", + "publicDelegatedPrefix", "region", ) ) @@ -2242,7 +2149,7 @@ def test_insert_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_rest_interceptors(null_interceptor): +def test_delete_unary_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2255,14 +2162,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_insert" + transports.PublicDelegatedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb( - compute.InsertPublicDelegatedPrefixeRequest() + pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb( + compute.DeletePublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -2276,7 +2183,7 @@ def test_insert_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.InsertPublicDelegatedPrefixeRequest() + 
request = compute.DeletePublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2284,7 +2191,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.insert( + client.delete_unary( request, metadata=[ ("key", "val"), @@ -2296,8 +2203,8 @@ def test_insert_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_rest_bad_request( - transport: str = "rest", request_type=compute.InsertPublicDelegatedPrefixeRequest +def test_delete_unary_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2305,7 +2212,11 @@ def test_insert_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2317,10 +2228,10 @@ def test_insert_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert(request) + client.delete_unary(request) -def test_insert_rest_flattened(): +def test_delete_unary_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2332,15 +2243,17 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_delegated_prefix="public_delegated_prefix_value", ) mock_args.update(sample_request) @@ -2353,20 +2266,20 @@ def test_insert_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert(**mock_args) + client.delete_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1], ) -def test_insert_rest_flattened_error(transport: str = "rest"): +def test_delete_unary_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2375,17 +2288,15 @@ def test_insert_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.insert( - compute.InsertPublicDelegatedPrefixeRequest(), + client.delete_unary( + compute.DeletePublicDelegatedPrefixeRequest(), project="project_value", region="region_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_delegated_prefix="public_delegated_prefix_value", ) -def test_insert_rest_error(): +def test_delete_unary_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2394,170 +2305,79 @@ def test_insert_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertPublicDelegatedPrefixeRequest, + compute.GetPublicDelegatedPrefixeRequest, dict, ], ) -def test_insert_unary_rest(request_type): +def test_get_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ - "public_delegated_prefix_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_delegated_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_delegated_prefix_resource"][field]) - ): - del request_init["public_delegated_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_delegated_prefix_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + fingerprint="fingerprint_value", + id=205, + ip_cidr_range="ip_cidr_range_value", + is_live_migration=True, + kind="kind_value", + name="name_value", + parent_prefix="parent_prefix_value", + region="region_value", + self_link="self_link_value", + status="status_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.PublicDelegatedPrefix.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.get(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.Operation) + assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.byoip_api_version == "byoip_api_version_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.fingerprint == "fingerprint_value" + assert response.id == 205 + assert response.ip_cidr_range == "ip_cidr_range_value" + assert response.is_live_migration is True + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.parent_prefix == "parent_prefix_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.status == "status_value" -def test_insert_unary_rest_required_fields( - request_type=compute.InsertPublicDelegatedPrefixeRequest, +def test_get_rest_required_fields( + request_type=compute.GetPublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport request_init = {} request_init["project"] = "" + request_init["public_delegated_prefix"] = "" request_init["region"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -2573,24 +2393,25 @@ def test_insert_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).get._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["project"] = "project_value" + jsonified_request["publicDelegatedPrefix"] = "public_delegated_prefix_value" jsonified_request["region"] = "region_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).get._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == "public_delegated_prefix_value" assert "region" in jsonified_request assert jsonified_request["region"] == "region_value" @@ -2601,7 +2422,7 @@ def test_insert_unary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.PublicDelegatedPrefix() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2613,41 +2434,40 @@ def test_insert_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.PublicDelegatedPrefix.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.get(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_unary_rest_unset_required_fields(): +def test_get_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.get._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(()) & set( ( "project", - "publicDelegatedPrefixResource", + "publicDelegatedPrefix", "region", ) ) @@ -2655,7 +2475,7 @@ def test_insert_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_unary_rest_interceptors(null_interceptor): +def test_get_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2668,14 +2488,14 @@ def test_insert_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_insert" + transports.PublicDelegatedPrefixesRestInterceptor, "post_get" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb( - compute.InsertPublicDelegatedPrefixeRequest() + pb_message = compute.GetPublicDelegatedPrefixeRequest.pb( + compute.GetPublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -2687,17 +2507,19 @@ def 
test_insert_unary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.PublicDelegatedPrefix.to_json( + compute.PublicDelegatedPrefix() + ) - request = compute.InsertPublicDelegatedPrefixeRequest() + request = compute.GetPublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.PublicDelegatedPrefix() - client.insert_unary( + client.get( request, metadata=[ ("key", "val"), @@ -2709,8 +2531,8 @@ def test_insert_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_unary_rest_bad_request( - transport: str = "rest", request_type=compute.InsertPublicDelegatedPrefixeRequest +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetPublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2718,7 +2540,11 @@ def test_insert_unary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2730,10 +2556,10 @@ def test_insert_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert_unary(request) + client.get(request) -def test_insert_unary_rest_flattened(): +def test_get_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2742,18 +2568,20 @@ def test_insert_unary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.PublicDelegatedPrefix() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_delegated_prefix="public_delegated_prefix_value", ) mock_args.update(sample_request) @@ -2761,25 +2589,25 @@ def test_insert_unary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.PublicDelegatedPrefix.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert_unary(**mock_args) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1], ) -def test_insert_unary_rest_flattened_error(transport: str = "rest"): +def test_get_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2788,17 +2616,15 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.insert_unary( - compute.InsertPublicDelegatedPrefixeRequest(), + client.get( + compute.GetPublicDelegatedPrefixeRequest(), project="project_value", region="region_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), + public_delegated_prefix="public_delegated_prefix_value", ) -def test_insert_unary_rest_error(): +def test_get_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2807,11 +2633,11 @@ def test_insert_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.ListPublicDelegatedPrefixesRequest, + compute.InsertPublicDelegatedPrefixeRequest, dict, ], ) -def test_list_rest(request_type): +def test_insert_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2819,39 +2645,176 @@ def test_list_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request = request_type(**request_init) + request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "is_live_migration": True, + "kind": "kind_value", + "name": "name_value", + "parent_prefix": "parent_prefix_value", + "public_delegated_sub_prefixs": [ + { + "delegatee_project": "delegatee_project_value", + "description": "description_value", + "ip_cidr_range": "ip_cidr_range_value", + "is_address": True, + "name": "name_value", + "region": "region_value", + "status": "status_value", + } + ], + "region": "region_value", + "self_link": "self_link_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.PublicDelegatedPrefixList( - id="id_value", - kind="kind_value", - next_page_token="next_page_token_value", - self_link="self_link_value", - ) + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefixList.pb(return_value) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.insert(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPager) - assert response.id == "id_value" + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" - assert response.next_page_token == "next_page_token_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_list_rest_required_fields( - request_type=compute.ListPublicDelegatedPrefixesRequest, +def test_insert_rest_required_fields( + request_type=compute.InsertPublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -2872,7 +2835,7 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).insert._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2882,17 +2845,9 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).insert._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "max_results", - "order_by", - "page_token", - "return_partial_success", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2908,7 +2863,7 @@ def test_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.PublicDelegatedPrefixList() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2920,47 +2875,41 @@ def test_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefixList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.insert(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rest_unset_required_fields(): +def test_insert_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list._get_unset_required_fields({}) + unset_fields = transport.insert._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "filter", - "maxResults", - "orderBy", - "pageToken", - "returnPartialSuccess", - ) - ) + set(("requestId",)) & set( ( "project", + "publicDelegatedPrefixResource", "region", ) ) @@ -2968,7 +2917,7 @@ def test_list_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_rest_interceptors(null_interceptor): +def test_insert_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2981,14 +2930,14 @@ def test_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_list" + transports.PublicDelegatedPrefixesRestInterceptor, "post_insert" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_list" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.ListPublicDelegatedPrefixesRequest.pb( - compute.ListPublicDelegatedPrefixesRequest() + pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb( + compute.InsertPublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -3000,19 +2949,17 @@ def test_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.PublicDelegatedPrefixList.to_json( - 
compute.PublicDelegatedPrefixList() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.ListPublicDelegatedPrefixesRequest() + request = compute.InsertPublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.PublicDelegatedPrefixList() + post.return_value = compute.Operation() - client.list( + client.insert( request, metadata=[ ("key", "val"), @@ -3024,8 +2971,8 @@ def test_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_rest_bad_request( - transport: str = "rest", request_type=compute.ListPublicDelegatedPrefixesRequest +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3045,10 +2992,10 @@ def test_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list(request) + client.insert(request) -def test_list_rest_flattened(): +def test_insert_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3057,7 +3004,7 @@ def test_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.PublicDelegatedPrefixList() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method sample_request = {"project": "sample1", "region": "sample2"} @@ -3066,6 +3013,9 @@ def test_list_rest_flattened(): mock_args = dict( project="project_value", region="region_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), ) mock_args.update(sample_request) @@ -3073,12 +3023,12 @@ def test_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.PublicDelegatedPrefixList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list(**mock_args) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -3091,7 +3041,7 @@ def test_list_rest_flattened(): ) -def test_list_rest_flattened_error(transport: str = "rest"): +def test_insert_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3100,94 +3050,39 @@ def test_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list( - compute.ListPublicDelegatedPrefixesRequest(), + client.insert( + compute.InsertPublicDelegatedPrefixeRequest(), project="project_value", region="region_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), ) -def test_list_rest_pager(transport: str = "rest"): +def test_insert_rest_error(): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - compute.PublicDelegatedPrefixList( - items=[ - compute.PublicDelegatedPrefix(), - compute.PublicDelegatedPrefix(), - compute.PublicDelegatedPrefix(), - ], - next_page_token="abc", - ), - compute.PublicDelegatedPrefixList( - items=[], - next_page_token="def", - ), - compute.PublicDelegatedPrefixList( - items=[ - compute.PublicDelegatedPrefix(), - ], - next_page_token="ghi", - ), - compute.PublicDelegatedPrefixList( - items=[ - compute.PublicDelegatedPrefix(), - compute.PublicDelegatedPrefix(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(compute.PublicDelegatedPrefixList.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"project": "sample1", "region": "sample2"} - - pager = client.list(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, compute.PublicDelegatedPrefix) for i in results) - - pages = list(client.list(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - compute.PatchPublicDelegatedPrefixeRequest, + compute.InsertPublicDelegatedPrefixeRequest, dict, ], ) -def test_patch_rest(request_type): +def test_insert_unary_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "project": "sample1", - "region": "sample2", - "public_delegated_prefix": "sample3", - } + request_init = {"project": "sample1", "region": "sample2"} request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "fingerprint": "fingerprint_value", @@ -3217,7 +3112,7 @@ def test_patch_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ + test_field = compute.InsertPublicDelegatedPrefixeRequest.meta.fields[ "public_delegated_prefix_resource" ] @@ -3230,63 +3125,1632 @@ def get_message_fields(field): if hasattr(field, "message") and field.message: 
is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertPublicDelegatedPrefixeRequest, +): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicDelegatedPrefixResource", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb( + compute.InsertPublicDelegatedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicDelegatedPrefixeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
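+    # A mocked 400 status is sufficient here: the client surfaces it as
+    # core_exceptions.BadRequest, so no real HTTP traffic is needed.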
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertPublicDelegatedPrefixeRequest(), + project="project_value", + region="region_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + + +def test_insert_unary_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListPublicDelegatedPrefixesRequest, + dict, + ], +) +def test_list_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
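+        # client.list() wraps this message in a pagers.ListPager; the scalar
+        # fields set here are what the assertions below read back through it.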
+ return_value = compute.PublicDelegatedPrefixList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +def test_list_rest_required_fields( + request_type=compute.ListPublicDelegatedPrefixesRequest, +): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
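+            # Unlike insert/patch, list transcodes to a GET with no request
+            # body, so only the query parameters are compared below.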
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListPublicDelegatedPrefixesRequest.pb( + compute.ListPublicDelegatedPrefixesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefixList.to_json( + compute.PublicDelegatedPrefixList() + ) + + request = compute.ListPublicDelegatedPrefixesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefixList() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListPublicDelegatedPrefixesRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListPublicDelegatedPrefixesRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
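+        # Four fake pages are queued via req.side_effect (duplicated so the
+        # pager can be driven twice); iteration should follow next_page_token
+        # until it comes back empty.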
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + next_page_token="abc", + ), + compute.PublicDelegatedPrefixList( + items=[], + next_page_token="def", + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + ], + next_page_token="ghi", + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PublicDelegatedPrefixList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PublicDelegatedPrefix) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchPublicDelegatedPrefixeRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "is_live_migration": True, + "kind": "kind_value", + "name": "name_value", + "parent_prefix": "parent_prefix_value", + "public_delegated_sub_prefixs": [ + { + "delegatee_project": "delegatee_project_value", + "description": "description_value", + "ip_cidr_range": "ip_cidr_range_value", + "is_address": True, + "name": "name_value", + "region": "region_value", + "status": "status_value", + } + ], + "region": "region_value", + "self_link": "self_link_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
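+        # patch() (non-unary) returns an extended_operation.ExtendedOperation,
+        # so every Operation field populated here is asserted on individually
+        # below, unlike the *_unary variants which only check the type.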
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields( + request_type=compute.PatchPublicDelegatedPrefixeRequest, +): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["publicDelegatedPrefix"] = "public_delegated_prefix_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that 
path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == "public_delegated_prefix_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicDelegatedPrefix", + "publicDelegatedPrefixResource", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb( + compute.PatchPublicDelegatedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() 
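+        # A canned, empty Operation body keeps the focus on the interceptor
+        # wiring; only the pre_patch/post_patch call counts matter here.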
+ req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicDelegatedPrefixeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + public_delegated_prefix="public_delegated_prefix_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
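+    # The ValueError is raised client-side before any transport call, which is
+    # why no HTTP session mock is set up in this test.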
+ with pytest.raises(ValueError): + client.patch( + compute.PatchPublicDelegatedPrefixeRequest(), + project="project_value", + region="region_value", + public_delegated_prefix="public_delegated_prefix_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + + +def test_patch_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchPublicDelegatedPrefixeRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request_init["public_delegated_prefix_resource"] = { + "byoip_api_version": "byoip_api_version_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "ip_cidr_range": "ip_cidr_range_value", + "is_live_migration": True, + "kind": "kind_value", + "name": "name_value", + "parent_prefix": "parent_prefix_value", + "public_delegated_sub_prefixs": [ + { + "delegatee_project": "delegatee_project_value", + "description": "description_value", + "ip_cidr_range": "ip_cidr_range_value", + "is_address": True, + "name": "name_value", + "region": "region_value", + "status": "status_value", + } + ], + "region": "region_value", + "self_link": "self_link_value", + "status": "status_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ + "public_delegated_prefix_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "public_delegated_prefix_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["public_delegated_prefix_resource"][field]) + ): + del request_init["public_delegated_prefix_resource"][field][i][ + subfield + ] + else: + del request_init["public_delegated_prefix_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchPublicDelegatedPrefixeRequest, +): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["publicDelegatedPrefix"] = "public_delegated_prefix_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == "public_delegated_prefix_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "publicDelegatedPrefix", + "publicDelegatedPrefixResource", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb( + compute.PatchPublicDelegatedPrefixeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicDelegatedPrefixeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request 
= request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + public_delegated_prefix="public_delegated_prefix_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchPublicDelegatedPrefixeRequest(), + project="project_value", + region="region_value", + public_delegated_prefix="public_delegated_prefix_value", + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + byoip_api_version="byoip_api_version_value" + ), + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_patch_unary_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_delegated_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + compute.WithdrawPublicDelegatedPrefixeRequest, + dict, + ], +) +def test_withdraw_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_delegated_prefix_resource"][field]) - ): - del request_init["public_delegated_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_delegated_prefix_resource"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3326,7 +4790,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.withdraw(request) # Establish that the response is the type that we expect. 
assert isinstance(response, extended_operation.ExtendedOperation) @@ -3354,8 +4818,8 @@ def get_message_fields(field): assert response.zone == "zone_value" -def test_patch_rest_required_fields( - request_type=compute.PatchPublicDelegatedPrefixeRequest, +def test_withdraw_rest_required_fields( + request_type=compute.WithdrawPublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -3377,7 +4841,7 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3388,7 +4852,7 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -3420,10 +4884,9 @@ def test_patch_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3436,26 +4899,25 @@ def test_patch_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.withdraw(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_rest_unset_required_fields(): +def test_withdraw_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.withdraw._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", "publicDelegatedPrefix", - "publicDelegatedPrefixResource", "region", ) ) @@ -3463,7 +4925,7 @@ def test_patch_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_rest_interceptors(null_interceptor): +def test_withdraw_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3476,14 +4938,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_patch" + transports.PublicDelegatedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb( - compute.PatchPublicDelegatedPrefixeRequest() + pb_message = compute.WithdrawPublicDelegatedPrefixeRequest.pb( + compute.WithdrawPublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -3497,7 +4959,7 @@ def 
test_patch_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchPublicDelegatedPrefixeRequest() + request = compute.WithdrawPublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3505,7 +4967,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch( + client.withdraw( request, metadata=[ ("key", "val"), @@ -3517,8 +4979,8 @@ def test_patch_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_rest_bad_request( - transport: str = "rest", request_type=compute.PatchPublicDelegatedPrefixeRequest +def test_withdraw_rest_bad_request( + transport: str = "rest", request_type=compute.WithdrawPublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3542,10 +5004,10 @@ def test_patch_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch(request) + client.withdraw(request) -def test_patch_rest_flattened(): +def test_withdraw_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3568,9 +5030,6 @@ def test_patch_rest_flattened(): project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) mock_args.update(sample_request) @@ -3583,20 +5042,20 @@ def test_patch_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch(**mock_args) + client.withdraw(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/withdraw" % client.transport._host, args[1], ) -def test_patch_rest_flattened_error(transport: str = "rest"): +def test_withdraw_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3605,18 +5064,15 @@ def test_patch_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.patch( - compute.PatchPublicDelegatedPrefixeRequest(), + client.withdraw( + compute.WithdrawPublicDelegatedPrefixeRequest(), project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) -def test_patch_rest_error(): +def test_withdraw_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3625,11 +5081,11 @@ def test_patch_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.PatchPublicDelegatedPrefixeRequest, + compute.WithdrawPublicDelegatedPrefixeRequest, dict, ], ) -def test_patch_unary_rest(request_type): +def test_withdraw_unary_rest(request_type): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3641,106 +5097,6 @@ def test_patch_unary_rest(request_type): "region": "sample2", "public_delegated_prefix": "sample3", } - request_init["public_delegated_prefix_resource"] = { - "creation_timestamp": "creation_timestamp_value", - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "ip_cidr_range": "ip_cidr_range_value", - "is_live_migration": True, - "kind": "kind_value", - "name": "name_value", - "parent_prefix": "parent_prefix_value", - "public_delegated_sub_prefixs": [ - { - "delegatee_project": "delegatee_project_value", - "description": "description_value", - "ip_cidr_range": "ip_cidr_range_value", - "is_address": True, - "name": "name_value", - "region": "region_value", - "status": "status_value", - } - ], - "region": "region_value", - "self_link": "self_link_value", - "status": "status_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchPublicDelegatedPrefixeRequest.meta.fields[ - "public_delegated_prefix_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "public_delegated_prefix_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["public_delegated_prefix_resource"][field]) - ): - del request_init["public_delegated_prefix_resource"][field][i][ - subfield - ] - else: - del request_init["public_delegated_prefix_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3780,14 +5136,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.withdraw_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_patch_unary_rest_required_fields( - request_type=compute.PatchPublicDelegatedPrefixeRequest, +def test_withdraw_unary_rest_required_fields( + request_type=compute.WithdrawPublicDelegatedPrefixeRequest, ): transport_class = transports.PublicDelegatedPrefixesRestTransport @@ -3809,7 +5165,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3820,7 +5176,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).withdraw._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -3852,10 +5208,9 @@ def test_patch_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3868,26 +5223,25 @@ def test_patch_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.withdraw_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_unary_rest_unset_required_fields(): +def test_withdraw_unary_rest_unset_required_fields(): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.withdraw._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "project", "publicDelegatedPrefix", - "publicDelegatedPrefixResource", "region", ) ) @@ -3895,7 +5249,7 @@ def test_patch_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): +def test_withdraw_unary_rest_interceptors(null_interceptor): transport = transports.PublicDelegatedPrefixesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3908,14 +5262,14 @@ def test_patch_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "post_patch" + transports.PublicDelegatedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( - transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch" + transports.PublicDelegatedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb( - compute.PatchPublicDelegatedPrefixeRequest() + pb_message = compute.WithdrawPublicDelegatedPrefixeRequest.pb( + compute.WithdrawPublicDelegatedPrefixeRequest() ) transcode.return_value = { "method": "post", @@ -3929,7 +5283,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): 
req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchPublicDelegatedPrefixeRequest() + request = compute.WithdrawPublicDelegatedPrefixeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3937,7 +5291,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch_unary( + client.withdraw_unary( request, metadata=[ ("key", "val"), @@ -3949,8 +5303,8 @@ def test_patch_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchPublicDelegatedPrefixeRequest +def test_withdraw_unary_rest_bad_request( + transport: str = "rest", request_type=compute.WithdrawPublicDelegatedPrefixeRequest ): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3974,10 +5328,10 @@ def test_patch_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch_unary(request) + client.withdraw_unary(request) -def test_patch_unary_rest_flattened(): +def test_withdraw_unary_rest_flattened(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4000,9 +5354,6 @@ def test_patch_unary_rest_flattened(): project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) mock_args.update(sample_request) @@ -4015,20 +5366,20 @@ def test_patch_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch_unary(**mock_args) + client.withdraw_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + "%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}/withdraw" % client.transport._host, args[1], ) -def test_patch_unary_rest_flattened_error(transport: str = "rest"): +def test_withdraw_unary_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4037,18 +5388,15 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.patch_unary( - compute.PatchPublicDelegatedPrefixeRequest(), + client.withdraw_unary( + compute.WithdrawPublicDelegatedPrefixeRequest(), project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ), ) -def test_patch_unary_rest_error(): +def test_withdraw_unary_rest_error(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4165,11 +5513,13 @@ def test_public_delegated_prefixes_base_transport(): # raise NotImplementedError. 
methods = ( "aggregated_list", + "announce", "delete", "get", "insert", "list", "patch", + "withdraw", ) for method in methods: with pytest.raises(NotImplementedError): @@ -4310,6 +5660,9 @@ def test_public_delegated_prefixes_client_transport_session_collision(transport_ session1 = client1.transport.aggregated_list._session session2 = client2.transport.aggregated_list._session assert session1 != session2 + session1 = client1.transport.announce._session + session2 = client2.transport.announce._session + assert session1 != session2 session1 = client1.transport.delete._session session2 = client2.transport.delete._session assert session1 != session2 @@ -4325,6 +5678,9 @@ def test_public_delegated_prefixes_client_transport_session_collision(transport_ session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 + session1 = client1.transport.withdraw._session + session2 = client2.transport.withdraw._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py index a4464986f13e..e23afc2f1faf 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -2453,6 +2453,12 @@ def test_insert_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -2464,6 +2470,7 @@ def test_insert_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -3014,6 +3021,12 @@ def test_insert_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -3025,6 +3038,7 @@ def test_insert_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3762,357 +3776,95 @@ def test_list_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - compute.PatchRegionBackendServiceRequest, + compute.ListUsableRegionBackendServicesRequest, dict, ], ) -def test_patch_rest(request_type): +def test_list_usable_rest(request_type): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "project": "sample1", - "region": "sample2", - "backend_service": "sample3", - } - request_init["backend_service_resource"] = { - "affinity_cookie_ttl_sec": 2432, - "backends": [ - { - "balancing_mode": "balancing_mode_value", - "capacity_scaler": 0.1575, - "description": "description_value", - "failover": True, - "group": "group_value", - "max_connections": 1608, - "max_connections_per_endpoint": 2990, - "max_connections_per_instance": 2978, - "max_rate": 849, - "max_rate_per_endpoint": 0.22310000000000002, - "max_rate_per_instance": 0.22190000000000001, - "max_utilization": 0.1633, - } - ], - "cdn_policy": { - "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], - "cache_key_policy": { - "include_host": True, - "include_http_headers": [ - "include_http_headers_value1", - "include_http_headers_value2", - ], - "include_named_cookies": [ - "include_named_cookies_value1", - "include_named_cookies_value2", - ], - "include_protocol": True, - "include_query_string": True, - "query_string_blacklist": [ - "query_string_blacklist_value1", - "query_string_blacklist_value2", - ], - "query_string_whitelist": [ - "query_string_whitelist_value1", - "query_string_whitelist_value2", - ], - }, - "cache_mode": "cache_mode_value", - "client_ttl": 1074, - "default_ttl": 1176, - "max_ttl": 761, - "negative_caching": True, - "negative_caching_policy": [{"code": 411, "ttl": 340}], - "request_coalescing": True, - "serve_while_stale": 1813, - "signed_url_cache_max_age_sec": 2890, - "signed_url_key_names": [ - "signed_url_key_names_value1", - "signed_url_key_names_value2", - ], - }, - "circuit_breakers": { - "max_connections": 1608, - "max_pending_requests": 2149, - "max_requests": 1313, - "max_requests_per_connection": 2902, - "max_retries": 1187, - }, - "compression_mode": "compression_mode_value", - "connection_draining": {"draining_timeout_sec": 2124}, - "connection_tracking_policy": { - "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", - "enable_strong_affinity": True, - "idle_timeout_sec": 1694, - "tracking_mode": "tracking_mode_value", - }, - "consistent_hash": { - "http_cookie": { - "name": "name_value", - "path": "path_value", - "ttl": {"nanos": 543, "seconds": 751}, - }, - "http_header_name": "http_header_name_value", - "minimum_ring_size": 1829, - }, - "creation_timestamp": "creation_timestamp_value", - "custom_request_headers": [ - "custom_request_headers_value1", - "custom_request_headers_value2", - ], - "custom_response_headers": [ - "custom_response_headers_value1", - "custom_response_headers_value2", - ], - "description": "description_value", - "edge_security_policy": "edge_security_policy_value", - "enable_c_d_n": True, - "failover_policy": { - "disable_connection_drain_on_failover": True, - "drop_traffic_if_unhealthy": True, - "failover_ratio": 0.1494, - }, - "fingerprint": "fingerprint_value", - "health_checks": ["health_checks_value1", "health_checks_value2"], - "iap": { - 
"enabled": True, - "oauth2_client_id": "oauth2_client_id_value", - "oauth2_client_secret": "oauth2_client_secret_value", - "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", - }, - "id": 205, - "kind": "kind_value", - "load_balancing_scheme": "load_balancing_scheme_value", - "locality_lb_policies": [ - { - "custom_policy": {"data": "data_value", "name": "name_value"}, - "policy": {"name": "name_value"}, - } - ], - "locality_lb_policy": "locality_lb_policy_value", - "log_config": { - "enable": True, - "optional_fields": ["optional_fields_value1", "optional_fields_value2"], - "optional_mode": "optional_mode_value", - "sample_rate": 0.1165, - }, - "max_stream_duration": {}, - "metadatas": {}, - "name": "name_value", - "network": "network_value", - "outlier_detection": { - "base_ejection_time": {}, - "consecutive_errors": 1956, - "consecutive_gateway_failure": 2880, - "enforcing_consecutive_errors": 3006, - "enforcing_consecutive_gateway_failure": 3930, - "enforcing_success_rate": 2334, - "interval": {}, - "max_ejection_percent": 2118, - "success_rate_minimum_hosts": 2799, - "success_rate_request_volume": 2915, - "success_rate_stdev_factor": 2663, - }, - "port": 453, - "port_name": "port_name_value", - "protocol": "protocol_value", - "region": "region_value", - "security_policy": "security_policy_value", - "security_settings": { - "client_tls_policy": "client_tls_policy_value", - "subject_alt_names": [ - "subject_alt_names_value1", - "subject_alt_names_value2", - ], - }, - "self_link": "self_link_value", - "service_bindings": ["service_bindings_value1", "service_bindings_value2"], - "session_affinity": "session_affinity_value", - "subsetting": {"policy": "policy_value"}, - "timeout_sec": 1185, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ - "backend_service_resource" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceListUsable( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendServiceListUsable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_usable(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsablePager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_list_usable_rest_required_fields( + request_type=compute.ListUsableRegionBackendServicesRequest, +): + transport_class = transports.RegionBackendServicesRestTransport - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "backend_service_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backend_service_resource"][field])): - del request_init["backend_service_resource"][field][i][subfield] - else: - del request_init["backend_service_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.patch(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" - - -def test_patch_rest_required_fields( - request_type=compute.PatchRegionBackendServiceRequest, -): - transport_class = transports.RegionBackendServicesRestTransport - - request_init = {} - request_init["backend_service"] = "" - request_init["project"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped + # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["backendService"] = "backend_service_value" jsonified_request["project"] = "project_value" jsonified_request["region"] = "region_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).patch._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "backendService" in jsonified_request - assert jsonified_request["backendService"] == "backend_service_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" assert "region" in jsonified_request @@ -4125,7 +3877,7 @@ def test_patch_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.BackendServiceListUsable() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4137,41 +3889,46 @@ def test_patch_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendServiceListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.list_usable(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_rest_unset_required_fields(): +def test_list_usable_rest_unset_required_fields(): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.list_usable._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) & set( ( - "backendService", - "backendServiceResource", "project", "region", ) @@ -4180,7 +3937,7 @@ def test_patch_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_rest_interceptors(null_interceptor): +def test_list_usable_rest_interceptors(null_interceptor): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4193,14 +3950,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "post_patch" + transports.RegionBackendServicesRestInterceptor, "post_list_usable" ) as post, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "pre_patch" + transports.RegionBackendServicesRestInterceptor, "pre_list_usable" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchRegionBackendServiceRequest.pb( - 
compute.PatchRegionBackendServiceRequest() + pb_message = compute.ListUsableRegionBackendServicesRequest.pb( + compute.ListUsableRegionBackendServicesRequest() ) transcode.return_value = { "method": "post", @@ -4212,17 +3969,19 @@ def test_patch_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.BackendServiceListUsable.to_json( + compute.BackendServiceListUsable() + ) - request = compute.PatchRegionBackendServiceRequest() + request = compute.ListUsableRegionBackendServicesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.BackendServiceListUsable() - client.patch( + client.list_usable( request, metadata=[ ("key", "val"), @@ -4234,8 +3993,8 @@ def test_patch_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_rest_bad_request( - transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest +def test_list_usable_rest_bad_request( + transport: str = "rest", request_type=compute.ListUsableRegionBackendServicesRequest ): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4243,11 +4002,7 @@ def test_patch_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "project": "sample1", - "region": "sample2", - "backend_service": "sample3", - } + request_init = {"project": "sample1", "region": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4259,10 +4014,10 @@ def test_patch_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch(request) + client.list_usable(request) -def test_patch_rest_flattened(): +def test_list_usable_rest_flattened(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4271,23 +4026,15 @@ def test_patch_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation() + return_value = compute.BackendServiceListUsable() # get arguments that satisfy an http rule for this method - sample_request = { - "project": "sample1", - "region": "sample2", - "backend_service": "sample3", - } + sample_request = {"project": "sample1", "region": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), ) mock_args.update(sample_request) @@ -4295,25 +4042,25 @@ def test_patch_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendServiceListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch(**mock_args) + client.list_usable(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/listUsable" % client.transport._host, args[1], ) -def test_patch_rest_flattened_error(transport: str = "rest"): +def test_list_usable_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4322,22 +4069,73 @@ def test_patch_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.patch( - compute.PatchRegionBackendServiceRequest(), + client.list_usable( + compute.ListUsableRegionBackendServicesRequest(), project="project_value", region="region_value", - backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 - ), ) -def test_patch_rest_error(): +def test_list_usable_rest_pager(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + compute.BackendService(), + compute.BackendService(), + ], + next_page_token="abc", + ), + compute.BackendServiceListUsable( + items=[], + next_page_token="def", + ), + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + ], + next_page_token="ghi", + ), + compute.BackendServiceListUsable( + items=[ + compute.BackendService(), + compute.BackendService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendServiceListUsable.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list_usable(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendService) for i in results) + + pages = list(client.list_usable(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", @@ -4346,7 +4144,7 @@ def test_patch_rest_error(): dict, ], ) -def test_patch_unary_rest(request_type): +def test_patch_rest(request_type): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4501,6 +4299,12 @@ def test_patch_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -4512,14 +4316,1900 @@ def test_patch_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields( + request_type=compute.PatchRegionBackendServiceRequest, +): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = "backend_service_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "backendServiceResource", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionBackendServiceRequest.pb( + compute.PatchRegionBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + 
+ request = compute.PatchRegionBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionBackendServiceRequest(), + project="project_value", + region="region_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + + +def test_patch_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionBackendServiceRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["backend_service_resource"] = { + "affinity_cookie_ttl_sec": 2432, + "backends": [ + { + "balancing_mode": "balancing_mode_value", + "capacity_scaler": 0.1575, + "description": "description_value", + "failover": True, + "group": "group_value", + "max_connections": 1608, + "max_connections_per_endpoint": 2990, + "max_connections_per_instance": 2978, + "max_rate": 849, + "max_rate_per_endpoint": 0.22310000000000002, + "max_rate_per_instance": 0.22190000000000001, + "max_utilization": 0.1633, + } + ], + "cdn_policy": { + "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], + "cache_key_policy": { + "include_host": True, + "include_http_headers": [ + "include_http_headers_value1", + "include_http_headers_value2", + ], + "include_named_cookies": [ + "include_named_cookies_value1", + "include_named_cookies_value2", + ], + "include_protocol": True, + "include_query_string": True, + "query_string_blacklist": [ + "query_string_blacklist_value1", + "query_string_blacklist_value2", + ], + "query_string_whitelist": [ + "query_string_whitelist_value1", + "query_string_whitelist_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": 1074, + "default_ttl": 1176, + "max_ttl": 761, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": 340}], + "request_coalescing": True, + "serve_while_stale": 1813, + "signed_url_cache_max_age_sec": 2890, + "signed_url_key_names": [ + "signed_url_key_names_value1", + "signed_url_key_names_value2", + ], + }, + "circuit_breakers": { + "max_connections": 1608, + "max_pending_requests": 2149, + "max_requests": 1313, + "max_requests_per_connection": 2902, + "max_retries": 1187, + }, + "compression_mode": "compression_mode_value", + "connection_draining": {"draining_timeout_sec": 2124}, + "connection_tracking_policy": { + "connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value", + "enable_strong_affinity": True, + "idle_timeout_sec": 1694, + "tracking_mode": "tracking_mode_value", + }, + "consistent_hash": { + "http_cookie": { + "name": "name_value", + "path": "path_value", + "ttl": {"nanos": 543, "seconds": 751}, + }, + "http_header_name": "http_header_name_value", + "minimum_ring_size": 1829, + }, + "creation_timestamp": "creation_timestamp_value", + "custom_request_headers": [ + "custom_request_headers_value1", + "custom_request_headers_value2", + ], + "custom_response_headers": [ + "custom_response_headers_value1", + "custom_response_headers_value2", + ], + "description": "description_value", + "edge_security_policy": "edge_security_policy_value", + "enable_c_d_n": True, + "failover_policy": { + 
"disable_connection_drain_on_failover": True, + "drop_traffic_if_unhealthy": True, + "failover_ratio": 0.1494, + }, + "fingerprint": "fingerprint_value", + "health_checks": ["health_checks_value1", "health_checks_value2"], + "iap": { + "enabled": True, + "oauth2_client_id": "oauth2_client_id_value", + "oauth2_client_secret": "oauth2_client_secret_value", + "oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value", + }, + "id": 205, + "kind": "kind_value", + "load_balancing_scheme": "load_balancing_scheme_value", + "locality_lb_policies": [ + { + "custom_policy": {"data": "data_value", "name": "name_value"}, + "policy": {"name": "name_value"}, + } + ], + "locality_lb_policy": "locality_lb_policy_value", + "log_config": { + "enable": True, + "optional_fields": ["optional_fields_value1", "optional_fields_value2"], + "optional_mode": "optional_mode_value", + "sample_rate": 0.1165, + }, + "max_stream_duration": {}, + "metadatas": {}, + "name": "name_value", + "network": "network_value", + "outlier_detection": { + "base_ejection_time": {}, + "consecutive_errors": 1956, + "consecutive_gateway_failure": 2880, + "enforcing_consecutive_errors": 3006, + "enforcing_consecutive_gateway_failure": 3930, + "enforcing_success_rate": 2334, + "interval": {}, + "max_ejection_percent": 2118, + "success_rate_minimum_hosts": 2799, + "success_rate_request_volume": 2915, + "success_rate_stdev_factor": 2663, + }, + "port": 453, + "port_name": "port_name_value", + "protocol": "protocol_value", + "region": "region_value", + "security_policy": "security_policy_value", + "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, + "client_tls_policy": "client_tls_policy_value", + "subject_alt_names": [ + "subject_alt_names_value1", + "subject_alt_names_value2", + ], + }, + "self_link": "self_link_value", + "service_bindings": ["service_bindings_value1", "service_bindings_value2"], + "session_affinity": "session_affinity_value", + "subsetting": {"policy": "policy_value"}, + "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ + "backend_service_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_service_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_service_resource"][field])): + del request_init["backend_service_resource"][field][i][subfield] + else: + del request_init["backend_service_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
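+        # The unary variant of this test only checks the response type, so any well-formed Operation suffices here.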
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchRegionBackendServiceRequest, +): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = "backend_service_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "backendServiceResource", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionBackendServiceRequest.pb( + compute.PatchRegionBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionBackendServiceRequest(), + project="project_value", + region="region_value", + backend_service="backend_service_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), + ) + + +def test_patch_unary_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyRegionBackendServiceRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "rules": [ + { + "action": "action_value", + "conditions": [ + { + "iam": "iam_value", + "op": "op_value", + "svc": "svc_value", + "sys": "sys_value", + "values": ["values_value1", "values_value2"], + } + ], + "description": "description_value", + "ins": ["ins_value1", "ins_value2"], + "log_configs": [ + { + "cloud_audit": { + "authorization_logging_options": { + "permission_type": "permission_type_value" + }, + "log_name": "log_name_value", + }, + "counter": { + "custom_fields": [ + {"name": "name_value", "value": "value_value"} + ], + "field": "field_value", + "metric": "metric_value", + }, + "data_access": {"log_mode": "log_mode_value"}, + } + ], + "not_ins": ["not_ins_value1", "not_ins_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + } + ], + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionBackendServiceRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
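+    # The Policy fields asserted below echo the values baked into return_value above.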
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyRegionBackendServiceRequest, +): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "regionSetPolicyRequestResource", + "resource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyRegionBackendServiceRequest.pb( + compute.SetIamPolicyRegionBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyRegionBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", + request_type=compute.SetIamPolicyRegionBackendServiceRequest, +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionBackendServiceRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_set_iam_policy_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyRegionBackendServiceRequest, + dict, + ], +) +def test_set_security_policy_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyRegionBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
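+        # Every field populated here is asserted back off the ExtendedOperation response further down.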
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_security_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_set_security_policy_rest_required_fields( + request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, +): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = "backend_service_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == "backend_service_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_security_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_security_policy_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendService", + "project", + "region", + "securityPolicyReferenceResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_set_security_policy" + ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "pre_set_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyRegionBackendServiceRequest.pb( + compute.SetSecurityPolicyRegionBackendServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyRegionBackendServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_rest_bad_request( + transport: str = "rest", + request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy(request) + + +def test_set_security_policy_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_security_policy( + compute.SetSecurityPolicyRegionBackendServiceRequest(), + project="project_value", + region="region_value", + backend_service="backend_service_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_security_policy_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyRegionBackendServiceRequest, + dict, + ], +) +def test_set_security_policy_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchRegionBackendServiceRequest.meta.fields[ - "backend_service_resource" + test_field = compute.SetSecurityPolicyRegionBackendServiceRequest.meta.fields[ + "security_policy_reference_resource" ] def get_message_fields(field): @@ -4549,7 +6239,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "backend_service_resource" + "security_policy_reference_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -4580,10 +6270,14 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backend_service_resource"][field])): - del request_init["backend_service_resource"][field][i][subfield] + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] else: - del request_init["backend_service_resource"][field][subfield] + del request_init["security_policy_reference_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4623,14 +6317,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.set_security_policy_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_patch_unary_rest_required_fields( - request_type=compute.PatchRegionBackendServiceRequest, +def test_set_security_policy_unary_rest_required_fields( + request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, ): transport_class = transports.RegionBackendServicesRestTransport @@ -4652,7 +6346,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4663,7 +6357,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).set_security_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -4695,7 +6389,7 @@ def test_patch_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4711,34 +6405,34 @@ def test_patch_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.set_security_policy_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_unary_rest_unset_required_fields(): +def test_set_security_policy_unary_rest_unset_required_fields(): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.set_security_policy._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( ( "backendService", - "backendServiceResource", "project", "region", + "securityPolicyReferenceResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): +def test_set_security_policy_unary_rest_interceptors(null_interceptor): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4751,14 +6445,14 @@ def test_patch_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "post_patch" + transports.RegionBackendServicesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "pre_patch" + transports.RegionBackendServicesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchRegionBackendServiceRequest.pb( - compute.PatchRegionBackendServiceRequest() + pb_message = compute.SetSecurityPolicyRegionBackendServiceRequest.pb( + compute.SetSecurityPolicyRegionBackendServiceRequest() ) transcode.return_value = { "method": "post", @@ -4772,7 +6466,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): 
req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchRegionBackendServiceRequest() + request = compute.SetSecurityPolicyRegionBackendServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4780,7 +6474,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch_unary( + client.set_security_policy_unary( request, metadata=[ ("key", "val"), @@ -4792,8 +6486,9 @@ def test_patch_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest +def test_set_security_policy_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, ): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4817,10 +6512,10 @@ def test_patch_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch_unary(request) + client.set_security_policy_unary(request) -def test_patch_unary_rest_flattened(): +def test_set_security_policy_unary_rest_flattened(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4843,8 +6538,8 @@ def test_patch_unary_rest_flattened(): project="project_value", region="region_value", backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" ), ) mock_args.update(sample_request) @@ -4858,20 +6553,20 @@ def test_patch_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch_unary(**mock_args) + client.set_security_policy_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/setSecurityPolicy" % client.transport._host, args[1], ) -def test_patch_unary_rest_flattened_error(transport: str = "rest"): +def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4880,18 +6575,18 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.patch_unary( - compute.PatchRegionBackendServiceRequest(), + client.set_security_policy_unary( + compute.SetSecurityPolicyRegionBackendServiceRequest(), project="project_value", region="region_value", backend_service="backend_service_value", - backend_service_resource=compute.BackendService( - affinity_cookie_ttl_sec=2432 + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" ), ) -def test_patch_unary_rest_error(): +def test_set_security_policy_unary_rest_error(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4900,11 +6595,11 @@ def test_patch_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetIamPolicyRegionBackendServiceRequest, + compute.TestIamPermissionsRegionBackendServiceRequest, dict, ], ) -def test_set_iam_policy_rest(request_type): +def test_test_iam_permissions_rest(request_type): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4912,90 +6607,16 @@ def test_set_iam_policy_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} - request_init["region_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { - "audit_log_configs": [ - { - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "ignore_child_exemptions": True, - "log_type": "log_type_value", - } - ], - "exempted_members": [ - "exempted_members_value1", - "exempted_members_value2", - ], - "service": "service_value", - } - ], - "bindings": {}, - "etag": "etag_value", - "iam_owned": True, - "rules": [ - { - "action": "action_value", - "conditions": [ - { - "iam": "iam_value", - "op": "op_value", - "svc": "svc_value", - "sys": "sys_value", - "values": ["values_value1", "values_value2"], - } - ], - "description": "description_value", - "ins": ["ins_value1", "ins_value2"], - "log_configs": [ - { - "cloud_audit": { - "authorization_logging_options": { - "permission_type": "permission_type_value" - }, - "log_name": "log_name_value", - }, - "counter": { - "custom_fields": [ - {"name": "name_value", "value": "value_value"} - ], - "field": "field_value", - "metric": "metric_value", - }, - "data_access": {"log_mode": "log_mode_value"}, - } - ], - "not_ins": ["not_ins_value1", "not_ins_value2"], - "permissions": ["permissions_value1", "permissions_value2"], - } - ], - "version": 774, - }, + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.SetIamPolicyRegionBackendServiceRequest.meta.fields[ - "region_set_policy_request_resource" + test_field = compute.TestIamPermissionsRegionBackendServiceRequest.meta.fields[ + "test_permissions_request_resource" ] def get_message_fields(field): @@ -5025,7 +6646,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "region_set_policy_request_resource" + "test_permissions_request_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -5057,44 +6678,40 @@ def get_message_fields(field): if subfield: if field_repeated: for i in range( - 0, len(request_init["region_set_policy_request_resource"][field]) + 0, len(request_init["test_permissions_request_resource"][field]) ): - del request_init["region_set_policy_request_resource"][field][i][ + del request_init["test_permissions_request_resource"][field][i][ subfield ] else: - del request_init["region_set_policy_request_resource"][field][subfield] + del request_init["test_permissions_request_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - etag="etag_value", - iam_owned=True, - version=774, + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.test_iam_permissions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, compute.Policy) - assert response.etag == "etag_value" - assert response.iam_owned is True - assert response.version == 774 + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] -def test_set_iam_policy_rest_required_fields( - request_type=compute.SetIamPolicyRegionBackendServiceRequest, +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsRegionBackendServiceRequest, ): transport_class = transports.RegionBackendServicesRestTransport @@ -5116,7 +6733,7 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5127,7 +6744,7 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5145,7 +6762,7 @@ def test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Policy() + return_value = compute.TestPermissionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5167,40 +6784,40 @@ def test_set_iam_policy_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.test_iam_permissions(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_test_iam_permissions_rest_unset_required_fields(): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "project", "region", - "regionSetPolicyRequestResource", "resource", + "testPermissionsRequestResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): +def test_test_iam_permissions_rest_interceptors(null_interceptor): transport = transports.RegionBackendServicesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5213,14 +6830,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "post_set_iam_policy" + transports.RegionBackendServicesRestInterceptor, 
"post_test_iam_permissions" ) as post, mock.patch.object( - transports.RegionBackendServicesRestInterceptor, "pre_set_iam_policy" + transports.RegionBackendServicesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.SetIamPolicyRegionBackendServiceRequest.pb( - compute.SetIamPolicyRegionBackendServiceRequest() + pb_message = compute.TestIamPermissionsRegionBackendServiceRequest.pb( + compute.TestIamPermissionsRegionBackendServiceRequest() ) transcode.return_value = { "method": "post", @@ -5232,17 +6849,19 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Policy.to_json(compute.Policy()) + req.return_value._content = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) - request = compute.SetIamPolicyRegionBackendServiceRequest() + request = compute.TestIamPermissionsRegionBackendServiceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Policy() + post.return_value = compute.TestPermissionsResponse() - client.set_iam_policy( + client.test_iam_permissions( request, metadata=[ ("key", "val"), @@ -5254,9 +6873,9 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_iam_policy_rest_bad_request( +def test_test_iam_permissions_rest_bad_request( transport: str = "rest", - request_type=compute.SetIamPolicyRegionBackendServiceRequest, + request_type=compute.TestIamPermissionsRegionBackendServiceRequest, ): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5276,10 +6895,10 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_iam_policy(request) + client.test_iam_permissions(request) -def test_set_iam_policy_rest_flattened(): +def test_test_iam_permissions_rest_flattened(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5288,7 +6907,7 @@ def test_set_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy() + return_value = compute.TestPermissionsResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -5302,8 +6921,8 @@ def test_set_iam_policy_rest_flattened(): project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] ), ) mock_args.update(sample_request) @@ -5312,25 +6931,25 @@ def test_set_iam_policy_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Policy.pb(return_value) + return_value = compute.TestPermissionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_iam_policy(**mock_args) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/setIamPolicy" + "%s/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/testIamPermissions" % client.transport._host, args[1], ) -def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5339,18 +6958,18 @@ def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - compute.SetIamPolicyRegionBackendServiceRequest(), + client.test_iam_permissions( + compute.TestIamPermissionsRegionBackendServiceRequest(), project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] ), ) -def test_set_iam_policy_rest_error(): +def test_test_iam_permissions_rest_error(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5518,6 +7137,12 @@ def test_update_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -5529,6 +7154,7 @@ def test_update_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6098,6 +7724,12 @@ def test_update_unary_rest(request_type): "region": "region_value", "security_policy": "security_policy_value", "security_settings": { + "aws_v4_authentication": { + "access_key": "access_key_value", + "access_key_id": "access_key_id_value", + "access_key_version": "access_key_version_value", + "origin_region": "origin_region_value", + }, "client_tls_policy": "client_tls_policy_value", "subject_alt_names": [ "subject_alt_names_value1", @@ -6109,6 +7741,7 @@ def test_update_unary_rest(request_type): "session_affinity": "session_affinity_value", "subsetting": {"policy": "policy_value"}, "timeout_sec": 1185, + "used_by": [{"reference": "reference_value"}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6610,8 +8243,11 @@ def test_region_backend_services_base_transport(): "get_iam_policy", "insert", "list", + "list_usable", "patch", "set_iam_policy", + "set_security_policy", + "test_iam_permissions", "update", ) for method in methods: @@ -6768,12 +8404,21 @@ def test_region_backend_services_client_transport_session_collision(transport_na session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.list_usable._session + session2 = client2.transport.list_usable._session + assert session1 != session2 session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 session1 = client1.transport.set_iam_policy._session session2 = client2.transport.set_iam_policy._session assert session1 != session2 + session1 = client1.transport.set_security_policy._session + session2 = client2.transport.set_security_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py index 258e23fe2748..c58ddbf112f4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py index d18c6940da14..3329fb477b40 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py @@ -2209,6 +2209,7 @@ def test_create_snapshot_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": 
"type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -2229,6 +2230,7 @@ def test_create_snapshot_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", @@ -2653,6 +2655,7 @@ def test_create_snapshot_unary_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": "type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -2673,6 +2676,7 @@ def test_create_snapshot_unary_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index bd298a46ac8f..30d57f042efb 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -2348,7 +2348,12 @@ def test_create_instances_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -2795,7 +2800,12 @@ def test_create_instances_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -5985,7 +5995,9 @@ def test_insert_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -6473,7 +6485,9 @@ def test_insert_unary_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -8496,7 +8510,9 @@ def test_patch_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": 
{ "autoscaler": "autoscaler_value", "is_stable": True, @@ -9002,7 +9018,9 @@ def test_patch_unary_rest(request_type): "named_ports": [{"name": "name_value", "port": 453}], "region": "region_value", "self_link": "self_link_value", - "stateful_policy": {"preserved_state": {"disks": {}}}, + "stateful_policy": { + "preserved_state": {"disks": {}, "external_i_ps": {}, "internal_i_ps": {}} + }, "status": { "autoscaler": "autoscaler_value", "is_stable": True, @@ -9452,7 +9470,12 @@ def test_patch_per_instance_configs_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -9909,7 +9932,12 @@ def test_patch_per_instance_configs_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -13652,7 +13680,12 @@ def test_update_per_instance_configs_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] @@ -14111,7 +14144,12 @@ def test_update_per_instance_configs_unary_rest(request_type): { "fingerprint": "fingerprint_value", "name": "name_value", - "preserved_state": {"disks": {}, "metadata": {}}, + "preserved_state": { + "disks": {}, + "external_i_ps": {}, + "internal_i_ps": {}, + "metadata": {}, + }, "status": "status_value", } ] diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py index b75b31876e39..47c06faa6a0b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -1692,6 +1692,7 @@ def test_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -2285,6 +2286,7 @@ def test_insert_unary_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py index 3092e012ff58..b3cc92feb834 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py @@ -683,6 +683,7 @@ def test_bulk_insert_rest(request_type): "nat_i_p": "nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } @@ -1269,6 +1270,7 @@ def test_bulk_insert_unary_rest(request_type): "nat_i_p": 
"nat_i_p_value", "network_tier": "network_tier_value", "public_ptr_domain_name": "public_ptr_domain_name_value", + "security_policy": "security_policy_value", "set_public_ptr": True, "type_": "type__value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index 66b730fd5821..4485369306dd 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -597,11 +597,11 @@ def test_region_network_endpoint_groups_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - compute.DeleteRegionNetworkEndpointGroupRequest, + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict, ], ) -def test_delete_rest(request_type): +def test_attach_network_endpoints_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -613,6 +613,101 @@ def test_delete_rest(request_type): "region": "sample2", "network_endpoint_group": "sample3", } + request_init["region_network_endpoint_groups_attach_endpoints_request_resource"] = { + "network_endpoints": [ + { + "annotations": {}, + "fqdn": "fqdn_value", + "instance": "instance_value", + "ip_address": "ip_address_value", + "port": 453, + } + ] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.meta.fields[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -652,7 +747,7 @@ def test_delete_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.attach_network_endpoints(request) # Establish that the response is the type that we expect. 
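    # Note on the expected type: the non-unary attach_network_endpoints surface returns a
    # google.api_core.extended_operation.ExtendedOperation wrapping the compute.Operation
    # payload faked above, which is why the assertion that follows checks for
    # ExtendedOperation rather than compute.Operation. A minimal usage sketch
    # (placeholder names throughout, application-default credentials assumed):
    #
    #     from google.cloud import compute_v1
    #
    #     client = compute_v1.RegionNetworkEndpointGroupsClient()
    #     operation = client.attach_network_endpoints(
    #         project="my-project",
    #         region="us-central1",
    #         network_endpoint_group="my-neg",
    #         region_network_endpoint_groups_attach_endpoints_request_resource=(
    #             compute_v1.RegionNetworkEndpointGroupsAttachEndpointsRequest(
    #                 network_endpoints=[
    #                     compute_v1.NetworkEndpoint(fqdn="backend.example.com", port=443)
    #                 ]
    #             )
    #         ),
    #     )
    #     operation.result()  # block until the long-running operation completes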
assert isinstance(response, extended_operation.ExtendedOperation) @@ -680,8 +775,8 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" -def test_delete_rest_required_fields( - request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +def test_attach_network_endpoints_rest_required_fields( + request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport @@ -703,7 +798,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).attach_network_endpoints._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -714,7 +809,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).attach_network_endpoints._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -746,9 +841,10 @@ def test_delete_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -761,19 +857,19 @@ def test_delete_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.attach_network_endpoints(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_rest_unset_required_fields(): +def test_attach_network_endpoints_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -781,13 +877,14 @@ def test_delete_rest_unset_required_fields(): "networkEndpointGroup", "project", "region", + "regionNetworkEndpointGroupsAttachEndpointsRequestResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_rest_interceptors(null_interceptor): +def test_attach_network_endpoints_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -800,14 +897,16 @@ def test_delete_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_attach_network_endpoints", ) as post, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "pre_attach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb( - 
compute.DeleteRegionNetworkEndpointGroupRequest() + pb_message = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest() ) transcode.return_value = { "method": "post", @@ -821,7 +920,7 @@ def test_delete_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeleteRegionNetworkEndpointGroupRequest() + request = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -829,7 +928,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete( + client.attach_network_endpoints( request, metadata=[ ("key", "val"), @@ -841,9 +940,9 @@ def test_delete_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_rest_bad_request( +def test_attach_network_endpoints_rest_bad_request( transport: str = "rest", - request_type=compute.DeleteRegionNetworkEndpointGroupRequest, + request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -867,10 +966,10 @@ def test_delete_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete(request) + client.attach_network_endpoints(request) -def test_delete_rest_flattened(): +def test_attach_network_endpoints_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -893,6 +992,11 @@ def test_delete_rest_flattened(): project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_attach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) mock_args.update(sample_request) @@ -905,20 +1009,20 @@ def test_delete_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete(**mock_args) + client.attach_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1], ) -def test_delete_rest_flattened_error(transport: str = "rest"): +def test_attach_network_endpoints_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -927,15 +1031,20 @@ def test_delete_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete( - compute.DeleteRegionNetworkEndpointGroupRequest(), + client.attach_network_endpoints( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest(), project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_attach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) -def test_delete_rest_error(): +def test_attach_network_endpoints_rest_error(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -944,11 +1053,11 @@ def test_delete_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.DeleteRegionNetworkEndpointGroupRequest, + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, dict, ], ) -def test_delete_unary_rest(request_type): +def test_attach_network_endpoints_unary_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -960,6 +1069,101 @@ def test_delete_unary_rest(request_type): "region": "sample2", "network_endpoint_group": "sample3", } + request_init["region_network_endpoint_groups_attach_endpoints_request_resource"] = { + "network_endpoints": [ + { + "annotations": {}, + "fqdn": "fqdn_value", + "instance": "instance_value", + "ip_address": "ip_address_value", + "port": 453, + } + ] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.meta.fields[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_network_endpoint_groups_attach_endpoints_request_resource" + ][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -999,14 +1203,14 @@ def test_delete_unary_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.attach_network_endpoints_unary(request) # Establish that the response is the type that we expect. 
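    # By contrast with the extended-operation flavor above, the *_unary variants return the
    # raw compute.Operation without the polling wrapper, which is why the assertion below
    # checks compute.Operation directly. A sketch of waiting on the unary form (placeholder
    # names; RegionOperationsClient is the companion client for region-scoped operations):
    #
    #     op = client.attach_network_endpoints_unary(request=request)
    #     ops = compute_v1.RegionOperationsClient()
    #     ops.wait(project="my-project", region="us-central1", operation=op.name)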
assert isinstance(response, compute.Operation) -def test_delete_unary_rest_required_fields( - request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +def test_attach_network_endpoints_unary_rest_required_fields( + request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport @@ -1028,7 +1232,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).attach_network_endpoints._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1039,7 +1243,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).attach_network_endpoints._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) @@ -1071,9 +1275,10 @@ def test_delete_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -1086,19 +1291,19 @@ def test_delete_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.attach_network_endpoints_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_unary_rest_unset_required_fields(): +def test_attach_network_endpoints_unary_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) assert set(unset_fields) == ( set(("requestId",)) & set( @@ -1106,13 +1311,14 @@ def test_delete_unary_rest_unset_required_fields(): "networkEndpointGroup", "project", "region", + "regionNetworkEndpointGroupsAttachEndpointsRequestResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_unary_rest_interceptors(null_interceptor): +def test_attach_network_endpoints_unary_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1125,14 +1331,16 @@ def test_delete_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_attach_network_endpoints", ) as post, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "pre_attach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb( - 
compute.DeleteRegionNetworkEndpointGroupRequest() + pb_message = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest() ) transcode.return_value = { "method": "post", @@ -1146,7 +1354,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeleteRegionNetworkEndpointGroupRequest() + request = compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1154,7 +1362,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete_unary( + client.attach_network_endpoints_unary( request, metadata=[ ("key", "val"), @@ -1166,9 +1374,9 @@ def test_delete_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_unary_rest_bad_request( +def test_attach_network_endpoints_unary_rest_bad_request( transport: str = "rest", - request_type=compute.DeleteRegionNetworkEndpointGroupRequest, + request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1192,10 +1400,10 @@ def test_delete_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_unary(request) + client.attach_network_endpoints_unary(request) -def test_delete_unary_rest_flattened(): +def test_attach_network_endpoints_unary_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1218,6 +1426,11 @@ def test_delete_unary_rest_flattened(): project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_attach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) mock_args.update(sample_request) @@ -1230,20 +1443,20 @@ def test_delete_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_unary(**mock_args) + client.attach_network_endpoints_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1], ) -def test_delete_unary_rest_flattened_error(transport: str = "rest"): +def test_attach_network_endpoints_unary_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1252,15 +1465,20 @@ def test_delete_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_unary( - compute.DeleteRegionNetworkEndpointGroupRequest(), + client.attach_network_endpoints_unary( + compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest(), project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_attach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) -def test_delete_unary_rest_error(): +def test_attach_network_endpoints_unary_rest_error(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1269,11 +1487,11 @@ def test_delete_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.GetRegionNetworkEndpointGroupRequest, + compute.DeleteRegionNetworkEndpointGroupRequest, dict, ], ) -def test_get_rest(request_type): +def test_delete_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1290,20 +1508,28 @@ def test_get_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroup( + return_value = compute.Operation( + client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", - default_port=1289, description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, id=205, + insert_time="insert_time_value", kind="kind_value", name="name_value", - network="network_value", - network_endpoint_type="network_endpoint_type_value", - psc_target_service="psc_target_service_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, region="region_value", self_link="self_link_value", - size=443, - subnetwork="subnetwork_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", zone="zone_value", ) @@ -1311,33 +1537,41 @@ def test_get_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroup.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, compute.NetworkEndpointGroup) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" assert response.creation_timestamp == "creation_timestamp_value" - assert response.default_port == 1289 assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 assert response.id == 205 + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" assert response.name == "name_value" - assert response.network == "network_value" - assert response.network_endpoint_type == "network_endpoint_type_value" - assert response.psc_target_service == "psc_target_service_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.size == 443 - assert response.subnetwork == "subnetwork_value" - assert response.zone == "zone_value" - - -def test_get_rest_required_fields( - request_type=compute.GetRegionNetworkEndpointGroupRequest, + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_delete_rest_required_fields( + request_type=compute.DeleteRegionNetworkEndpointGroupRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport @@ -1359,7 +1593,7 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1370,7 +1604,9 @@ def test_get_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1388,7 +1624,7 @@ def test_get_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroup() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1400,7 +1636,7 @@ def test_get_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1409,30 +1645,2023 @@ def test_get_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroup.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_rest_unset_required_fields(): +def test_delete_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("requestId",)) + & set( + ( + "networkEndpointGroup", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb( + compute.DeleteRegionNetworkEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionNetworkEndpointGroupRequest(), + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + + +def test_delete_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteRegionNetworkEndpointGroupRequest, + dict, + ], +) +def test_delete_unary_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = "network_endpoint_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == "network_endpoint_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
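    # The block below repeats the REST-mocking recipe used throughout these generated tests:
    # build the proto-plus return value, convert it to its underlying protobuf with `.pb()`,
    # serialize it with json_format.MessageToJson, and plant the bytes on a requests
    # Response. A compact helper capturing the recipe might look like this (hypothetical
    # name, not part of the library or of this patch):
    #
    #     def fake_rest_response(message, status_code=200):
    #         resp = Response()
    #         resp.status_code = status_code
    #         pb = type(message).pb(message)
    #         resp._content = json_format.MessageToJson(pb).encode("UTF-8")
    #         return resp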
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "networkEndpointGroup", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb( + compute.DeleteRegionNetworkEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionNetworkEndpointGroupRequest(), + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + + +def test_delete_unary_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, + dict, + ], +) +def test_detach_network_endpoints_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request_init["region_network_endpoint_groups_detach_endpoints_request_resource"] = { + "network_endpoints": [ + { + "annotations": {}, + "fqdn": "fqdn_value", + "instance": "instance_value", + "ip_address": "ip_address_value", + "port": 453, + } + ] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.meta.fields[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detach_network_endpoints(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_detach_network_endpoints_rest_required_fields( + request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, +): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = "network_endpoint_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == "network_endpoint_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detach_network_endpoints(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detach_network_endpoints_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "networkEndpointGroup", + "project", + "region", + "regionNetworkEndpointGroupsDetachEndpointsRequestResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_detach_network_endpoints", + ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "pre_detach_network_endpoints", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints(request) + + +def test_detach_network_endpoints_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_detach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.detach_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
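+        # args[1] is the URL handed to Session.request(); path_template.validate()
+        # checks that it matches the detachNetworkEndpoints URI template, i.e. that
+        # the flattened project/region/network_endpoint_group arguments were
+        # expanded into the request path.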
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_detach_network_endpoints_rest_flattened_error(transport: str = "rest"): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detach_network_endpoints( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest(), + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_detach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), + ) + + +def test_detach_network_endpoints_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, + dict, + ], +) +def test_detach_network_endpoints_unary_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request_init["region_network_endpoint_groups_detach_endpoints_request_resource"] = { + "network_endpoints": [ + { + "annotations": {}, + "fqdn": "fqdn_value", + "instance": "instance_value", + "ip_address": "ip_address_value", + "port": 453, + } + ] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.meta.fields[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field] + ), + ): + del request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field][i][subfield] + else: + del request_init[ + "region_network_endpoint_groups_detach_endpoints_request_resource" + ][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detach_network_endpoints_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_detach_network_endpoints_unary_rest_required_fields( + request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, +): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = "network_endpoint_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == "network_endpoint_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detach_network_endpoints_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detach_network_endpoints_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "networkEndpointGroup", + "project", + "region", + "regionNetworkEndpointGroupsDetachEndpointsRequestResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_detach_network_endpoints", + ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "pre_detach_network_endpoints", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, 
+): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints_unary(request) + + +def test_detach_network_endpoints_unary_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_detach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.detach_network_endpoints_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_detach_network_endpoints_unary_rest_flattened_error(transport: str = "rest"): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.detach_network_endpoints_unary( + compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest(), + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + region_network_endpoint_groups_detach_endpoints_request_resource=compute.RegionNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), + ) + + +def test_detach_network_endpoints_unary_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionNetworkEndpointGroupRequest, + dict, + ], +) +def test_get_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroup( + creation_timestamp="creation_timestamp_value", + default_port=1289, + description="description_value", + id=205, + kind="kind_value", + name="name_value", + network="network_value", + network_endpoint_type="network_endpoint_type_value", + psc_target_service="psc_target_service_value", + region="region_value", + self_link="self_link_value", + size=443, + subnetwork="subnetwork_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
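+    # Unlike the mutating methods above, get() returns the NetworkEndpointGroup
+    # resource itself rather than an Operation, so the field values faked in
+    # return_value are asserted directly on the response.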
+ assert isinstance(response, compute.NetworkEndpointGroup) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.default_port == 1289 + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.network == "network_value" + assert response.network_endpoint_type == "network_endpoint_type_value" + assert response.psc_target_service == "psc_target_service_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.size == 443 + assert response.subnetwork == "subnetwork_value" + assert response.zone == "zone_value" + + +def test_get_rest_required_fields( + request_type=compute.GetRegionNetworkEndpointGroupRequest, +): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = "network_endpoint_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == "network_endpoint_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "networkEndpointGroup", + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionNetworkEndpointGroupRequest.pb( + compute.GetRegionNetworkEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroup.to_json( + compute.NetworkEndpointGroup() + ) + + request = compute.GetRegionNetworkEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroup() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionNetworkEndpointGroupRequest +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
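+    # The mocked 400 response is surfaced by the REST transport as
+    # core_exceptions.BadRequest (google.api_core maps HTTP status codes to
+    # exception classes), which is what the pytest.raises block below expects.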
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionNetworkEndpointGroupRequest(), + project="project_value", + region="region_value", + network_endpoint_group="network_endpoint_group_value", + ) + + +def test_get_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionNetworkEndpointGroupRequest, + dict, + ], +) +def test_insert_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["network_endpoint_group_resource"] = { + "annotations": {}, + "app_engine": { + "service": "service_value", + "url_mask": "url_mask_value", + "version": "version_value", + }, + "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, + "cloud_run": { + "service": "service_value", + "tag": "tag_value", + "url_mask": "url_mask_value", + }, + "creation_timestamp": "creation_timestamp_value", + "default_port": 1289, + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "network": "network_value", + "network_endpoint_type": "network_endpoint_type_value", + "psc_data": { + "consumer_psc_address": "consumer_psc_address_value", + "psc_connection_id": 1793, + "psc_connection_status": "psc_connection_status_value", + }, + "psc_target_service": "psc_target_service_value", + "region": "region_value", + "self_link": "self_link_value", + "size": 443, + "subnetwork": "subnetwork_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionNetworkEndpointGroupRequest.meta.fields[ + "network_endpoint_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "network_endpoint_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["network_endpoint_group_resource"][field]) + ): + del request_init["network_endpoint_group_resource"][field][i][ + subfield + ] + else: + del request_init["network_endpoint_group_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_insert_rest_required_fields( + request_type=compute.InsertRegionNetworkEndpointGroupRequest, +): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
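+    # i.e. the only field that may still be reported unset is "request_id"; the
+    # required path and body parameters populated above must not appear in this
+    # query-parameter set.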
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) & set( ( - "networkEndpointGroup", + "networkEndpointGroupResource", "project", "region", ) @@ -1441,7 +3670,7 @@ def test_get_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_rest_interceptors(null_interceptor): +def test_insert_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1454,14 +3683,14 @@ def test_get_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "post_get" + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_get" + transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.GetRegionNetworkEndpointGroupRequest.pb( - compute.GetRegionNetworkEndpointGroupRequest() + pb_message = compute.InsertRegionNetworkEndpointGroupRequest.pb( + compute.InsertRegionNetworkEndpointGroupRequest() ) transcode.return_value = { "method": "post", @@ -1473,19 +3702,17 @@ def test_get_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = compute.NetworkEndpointGroup.to_json( - compute.NetworkEndpointGroup() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.GetRegionNetworkEndpointGroupRequest() + request = compute.InsertRegionNetworkEndpointGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.NetworkEndpointGroup() + post.return_value = compute.Operation() - client.get( + client.insert( request, metadata=[ ("key", "val"), @@ -1497,8 +3724,9 @@ def test_get_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_rest_bad_request( - transport: str = "rest", request_type=compute.GetRegionNetworkEndpointGroupRequest +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertRegionNetworkEndpointGroupRequest, ): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1506,11 +3734,7 @@ def test_get_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "project": "sample1", - "region": "sample2", - "network_endpoint_group": "sample3", - } + request_init = {"project": "sample1", "region": "sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1522,10 +3746,10 @@ def test_get_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get(request) + client.insert(request) -def test_get_rest_flattened(): +def test_insert_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1534,20 +3758,18 @@ def test_get_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroup() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = { - "project": "sample1", - "region": "sample2", - "network_endpoint_group": "sample3", - } + sample_request = {"project": "sample1", "region": "sample2"} # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - network_endpoint_group="network_endpoint_group_value", + network_endpoint_group_resource=compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ), ) mock_args.update(sample_request) @@ -1555,25 +3777,25 @@ def test_get_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroup.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get(**mock_args) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" % client.transport._host, args[1], ) -def test_get_rest_flattened_error(transport: str = "rest"): +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1582,15 +3804,17 @@ def test_get_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get( - compute.GetRegionNetworkEndpointGroupRequest(), + client.insert( + compute.InsertRegionNetworkEndpointGroupRequest(), project="project_value", region="region_value", - network_endpoint_group="network_endpoint_group_value", + network_endpoint_group_resource=compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ), ) -def test_get_rest_error(): +def test_insert_rest_error(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1603,7 +3827,7 @@ def test_get_rest_error(): dict, ], ) -def test_insert_rest(request_type): +def test_insert_unary_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1758,35 +3982,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.insert_unary(request) # Establish that the response is the type that we expect. 
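# --- Illustrative sketch (editor's addition, not part of the generated diff) ---
# A toy version of what the path_template.validate(...) assertions above check: the
# flattened call must expand to the documented URI template. The tiny validator here is
# only a stand-in written to show the idea; the real tests use google.api_core's
# path_template.
import re


def matches_template(template, path):
    # Replace each {placeholder} with a single-segment wildcard, escaping the rest.
    parts = re.split(r"\{[^}]+\}", template)
    pattern = "[^/]+".join(re.escape(p) for p in parts)
    return re.fullmatch(pattern, path) is not None


assert matches_template(
    "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups",
    "/compute/v1/projects/sample1/regions/sample2/networkEndpointGroups",
)
assert not matches_template(
    "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups",
    "/compute/v1/projects/sample1/networkEndpointGroups",
)
# --- end sketch ---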
- assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_insert_rest_required_fields( +def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionNetworkEndpointGroupRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport @@ -1864,14 +4066,14 @@ def test_insert_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.insert_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_rest_unset_required_fields(): +def test_insert_unary_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) @@ -1890,7 +4092,7 @@ def test_insert_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_rest_interceptors(null_interceptor): +def test_insert_unary_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1932,7 +4134,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.insert( + client.insert_unary( request, metadata=[ ("key", "val"), @@ -1944,7 +4146,7 @@ def test_insert_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_rest_bad_request( +def test_insert_unary_rest_bad_request( transport: str = "rest", request_type=compute.InsertRegionNetworkEndpointGroupRequest, ): @@ -1966,10 +4168,10 @@ def test_insert_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert(request) + client.insert_unary(request) -def test_insert_rest_flattened(): +def test_insert_unary_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2002,7 +4204,7 @@ def test_insert_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert(**mock_args) + client.insert_unary(**mock_args) # Establish that the underlying call was made with the expected # request 
object values. @@ -2015,7 +4217,7 @@ def test_insert_rest_flattened(): ) -def test_insert_rest_flattened_error(transport: str = "rest"): +def test_insert_unary_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2024,7 +4226,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.insert( + client.insert_unary( compute.InsertRegionNetworkEndpointGroupRequest(), project="project_value", region="region_value", @@ -2034,7 +4236,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): ) -def test_insert_rest_error(): +def test_insert_unary_rest_error(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2043,11 +4245,11 @@ def test_insert_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertRegionNetworkEndpointGroupRequest, + compute.ListRegionNetworkEndpointGroupsRequest, dict, ], ) -def test_insert_unary_rest(request_type): +def test_list_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2055,161 +4257,39 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} - request_init["network_endpoint_group_resource"] = { - "annotations": {}, - "app_engine": { - "service": "service_value", - "url_mask": "url_mask_value", - "version": "version_value", - }, - "cloud_function": {"function": "function_value", "url_mask": "url_mask_value"}, - "cloud_run": { - "service": "service_value", - "tag": "tag_value", - "url_mask": "url_mask_value", - }, - "creation_timestamp": "creation_timestamp_value", - "default_port": 1289, - "description": "description_value", - "id": 205, - "kind": "kind_value", - "name": "name_value", - "network": "network_value", - "network_endpoint_type": "network_endpoint_type_value", - "psc_data": { - "consumer_psc_address": "consumer_psc_address_value", - "psc_connection_id": 1793, - "psc_connection_status": "psc_connection_status_value", - }, - "psc_target_service": "psc_target_service_value", - "region": "region_value", - "self_link": "self_link_value", - "size": 443, - "subnetwork": "subnetwork_value", - "zone": "zone_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertRegionNetworkEndpointGroupRequest.meta.fields[ - "network_endpoint_group_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "network_endpoint_group_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["network_endpoint_group_resource"][field]) - ): - del request_init["network_endpoint_group_resource"][field][i][ - subfield - ] - else: - del request_init["network_endpoint_group_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", + return_value = compute.NetworkEndpointGroupList( + id="id_value", kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", + next_page_token="next_page_token_value", + self_link="self_link_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.NetworkEndpointGroupList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.list(request) # Establish that the response is the type that we expect. - assert isinstance(response, compute.Operation) + assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" -def test_insert_unary_rest_required_fields( - request_type=compute.InsertRegionNetworkEndpointGroupRequest, +def test_list_rest_required_fields( + request_type=compute.ListRegionNetworkEndpointGroupsRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport @@ -2230,7 +4310,7 @@ def test_insert_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2240,9 +4320,17 @@ def test_insert_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).list._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2258,7 +4346,7 @@ def test_insert_unary_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.NetworkEndpointGroupList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2270,40 +4358,46 @@ def test_insert_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.NetworkEndpointGroupList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.list(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_unary_rest_unset_required_fields(): +def test_list_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.list._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) & set( ( - "networkEndpointGroupResource", "project", "region", ) @@ -2312,7 +4406,7 @@ def test_insert_unary_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_unary_rest_interceptors(null_interceptor): +def test_list_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2325,14 +4419,14 @@ def test_insert_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "post_insert" + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_insert" + transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertRegionNetworkEndpointGroupRequest.pb( - compute.InsertRegionNetworkEndpointGroupRequest() + pb_message = compute.ListRegionNetworkEndpointGroupsRequest.pb( + compute.ListRegionNetworkEndpointGroupsRequest() ) transcode.return_value = { "method": "post", @@ -2344,17 +4438,19 @@ def test_insert_unary_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.Operation.to_json(compute.Operation()) + req.return_value._content = compute.NetworkEndpointGroupList.to_json( + compute.NetworkEndpointGroupList() + ) - request = compute.InsertRegionNetworkEndpointGroupRequest() + request = compute.ListRegionNetworkEndpointGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() + post.return_value = compute.NetworkEndpointGroupList() - client.insert_unary( + client.list( request, metadata=[ ("key", "val"), @@ -2366,9 +4462,8 @@ def 
test_insert_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_unary_rest_bad_request( - transport: str = "rest", - request_type=compute.InsertRegionNetworkEndpointGroupRequest, +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionNetworkEndpointGroupsRequest ): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2388,10 +4483,10 @@ def test_insert_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert_unary(request) + client.list(request) -def test_insert_unary_rest_flattened(): +def test_list_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2400,7 +4495,7 @@ def test_insert_unary_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Operation() + return_value = compute.NetworkEndpointGroupList() # get arguments that satisfy an http rule for this method sample_request = {"project": "sample1", "region": "sample2"} @@ -2409,9 +4504,6 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", region="region_value", - network_endpoint_group_resource=compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ), ) mock_args.update(sample_request) @@ -2419,12 +4511,12 @@ def test_insert_unary_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.NetworkEndpointGroupList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert_unary(**mock_args) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -2437,7 +4529,7 @@ def test_insert_unary_rest_flattened(): ) -def test_insert_unary_rest_flattened_error(transport: str = "rest"): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2446,74 +4538,131 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.insert_unary( - compute.InsertRegionNetworkEndpointGroupRequest(), + client.list( + compute.ListRegionNetworkEndpointGroupsRequest(), project="project_value", region="region_value", - network_endpoint_group_resource=compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ), ) -def test_insert_unary_rest_error(): +def test_list_rest_pager(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + ], + next_page_token="abc", + ), + compute.NetworkEndpointGroupList( + items=[], + next_page_token="def", + ), + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + ], + next_page_token="ghi", + ), + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkEndpointGroupList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NetworkEndpointGroup) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - compute.ListRegionNetworkEndpointGroupsRequest, + compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, dict, ], ) -def test_list_rest(request_type): +def test_list_network_endpoints_rest(request_type): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroupList( + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints( id="id_value", kind="kind_value", next_page_token="next_page_token_value", - self_link="self_link_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroupList.pb(return_value) + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.list_network_endpoints(request) # Establish that the response is the type that we expect. 
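# --- Illustrative sketch (editor's addition, not part of the generated diff) ---
# A toy model of the paging behaviour asserted in the *_rest_pager test above: the
# mocked session yields one serialized page per call (via side_effect), and the pager
# keeps fetching until a page arrives without a next_page_token. FakePager is an
# illustrative stand-in for the generated pagers.ListPager, not the real class.
from unittest import mock


class FakePager:
    def __init__(self, fetch_page):
        self._fetch_page = fetch_page

    def __iter__(self):
        token = ""
        while True:
            page = self._fetch_page(page_token=token)
            yield from page["items"]
            token = page.get("next_page_token", "")
            if not token:
                break


fetch = mock.Mock(
    side_effect=[
        {"items": [1, 2, 3], "next_page_token": "abc"},
        {"items": [], "next_page_token": "def"},
        {"items": [4], "next_page_token": "ghi"},
        {"items": [5, 6]},
    ]
)

assert list(FakePager(fetch)) == [1, 2, 3, 4, 5, 6]
assert fetch.call_count == 4  # one HTTP call per page, mirroring the four pages above
# --- end sketch ---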
- assert isinstance(response, pagers.ListPager) + assert isinstance(response, pagers.ListNetworkEndpointsPager) assert response.id == "id_value" assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.self_link == "self_link_value" -def test_list_rest_required_fields( - request_type=compute.ListRegionNetworkEndpointGroupsRequest, +def test_list_network_endpoints_rest_required_fields( + request_type=compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ): transport_class = transports.RegionNetworkEndpointGroupsRestTransport request_init = {} + request_init["network_endpoint_group"] = "" request_init["project"] = "" request_init["region"] = "" request = request_type(**request_init) @@ -2530,17 +4679,18 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).list_network_endpoints._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["networkEndpointGroup"] = "network_endpoint_group_value" jsonified_request["project"] = "project_value" jsonified_request["region"] = "region_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).list_network_endpoints._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -2554,6 +4704,8 @@ def test_list_rest_required_fields( jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == "network_endpoint_group_value" assert "project" in jsonified_request assert jsonified_request["project"] == "project_value" assert "region" in jsonified_request @@ -2566,7 +4718,7 @@ def test_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroupList() + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2578,7 +4730,7 @@ def test_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -2587,25 +4739,27 @@ def test_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroupList.pb(return_value) + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.list_network_endpoints(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rest_unset_required_fields(): +def test_list_network_endpoints_rest_unset_required_fields(): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list._get_unset_required_fields({}) + unset_fields = transport.list_network_endpoints._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -2618,6 +4772,7 @@ def test_list_rest_unset_required_fields(): ) & set( ( + "networkEndpointGroup", "project", "region", ) @@ -2626,7 +4781,7 @@ def test_list_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_rest_interceptors(null_interceptor): +def test_list_network_endpoints_rest_interceptors(null_interceptor): transport = transports.RegionNetworkEndpointGroupsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2639,14 +4794,16 @@ def test_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_list_network_endpoints", ) as post, mock.patch.object( - transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_list" + transports.RegionNetworkEndpointGroupsRestInterceptor, + "pre_list_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.ListRegionNetworkEndpointGroupsRequest.pb( - compute.ListRegionNetworkEndpointGroupsRequest() + pb_message = compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest.pb( + compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest() ) transcode.return_value = { "method": "post", @@ -2658,19 +4815,21 @@ def test_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.NetworkEndpointGroupList.to_json( - compute.NetworkEndpointGroupList() + req.return_value._content = ( + compute.NetworkEndpointGroupsListNetworkEndpoints.to_json( + compute.NetworkEndpointGroupsListNetworkEndpoints() + ) ) - request = compute.ListRegionNetworkEndpointGroupsRequest() + request = compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.NetworkEndpointGroupList() + 
post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() - client.list( + client.list_network_endpoints( request, metadata=[ ("key", "val"), @@ -2682,8 +4841,9 @@ def test_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_rest_bad_request( - transport: str = "rest", request_type=compute.ListRegionNetworkEndpointGroupsRequest +def test_list_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2691,7 +4851,11 @@ def test_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2703,10 +4867,10 @@ def test_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list(request) + client.list_network_endpoints(request) -def test_list_rest_flattened(): +def test_list_network_endpoints_rest_flattened(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2715,15 +4879,20 @@ def test_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.NetworkEndpointGroupList() + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", + network_endpoint_group="network_endpoint_group_value", ) mock_args.update(sample_request) @@ -2731,25 +4900,27 @@ def test_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.NetworkEndpointGroupList.pb(return_value) + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list(**mock_args) + client.list_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
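# --- Illustrative sketch (editor's addition, not part of the generated diff) ---
# A minimal model of the response-faking pattern repeated throughout these tests: the
# expected payload is serialized to JSON, attached to a requests.Response as its raw
# content, and the code under test is asserted against the decoded result. The call()
# helper is illustrative only; requests and unittest.mock are assumed available, as the
# tests above already use both.
import json
from unittest import mock

from requests import Response


def call(session, url):
    # Toy client: issue the request and decode the JSON body.
    resp = session.request("GET", url)
    return json.loads(resp.content.decode("utf-8"))


response_value = Response()
response_value.status_code = 200
response_value._content = json.dumps({"kind": "kind_value"}).encode("UTF-8")

session = mock.Mock()
session.request.return_value = response_value

assert call(session, "v1/sample_method") == {"kind": "kind_value"}
# --- end sketch ---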
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" + "%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints" % client.transport._host, args[1], ) -def test_list_rest_flattened_error(transport: str = "rest"): +def test_list_network_endpoints_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2758,14 +4929,15 @@ def test_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list( - compute.ListRegionNetworkEndpointGroupsRequest(), + client.list_network_endpoints( + compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(), project="project_value", region="region_value", + network_endpoint_group="network_endpoint_group_value", ) -def test_list_rest_pager(transport: str = "rest"): +def test_list_network_endpoints_rest_pager(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2777,28 +4949,28 @@ def test_list_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - compute.NetworkEndpointGroupList( + compute.NetworkEndpointGroupsListNetworkEndpoints( items=[ - compute.NetworkEndpointGroup(), - compute.NetworkEndpointGroup(), - compute.NetworkEndpointGroup(), + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), ], next_page_token="abc", ), - compute.NetworkEndpointGroupList( + compute.NetworkEndpointGroupsListNetworkEndpoints( items=[], next_page_token="def", ), - compute.NetworkEndpointGroupList( + compute.NetworkEndpointGroupsListNetworkEndpoints( items=[ - compute.NetworkEndpointGroup(), + compute.NetworkEndpointWithHealthStatus(), ], next_page_token="ghi", ), - compute.NetworkEndpointGroupList( + compute.NetworkEndpointGroupsListNetworkEndpoints( items=[ - compute.NetworkEndpointGroup(), - compute.NetworkEndpointGroup(), + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), ], ), ) @@ -2806,22 +4978,31 @@ def test_list_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(compute.NetworkEndpointGroupList.to_json(x) for x in response) + response = tuple( + compute.NetworkEndpointGroupsListNetworkEndpoints.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } - pager = client.list(request=sample_request) + pager = client.list_network_endpoints(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, compute.NetworkEndpointGroup) for i in results) + assert all( + isinstance(i, compute.NetworkEndpointWithHealthStatus) for i in results + ) - pages = 
list(client.list(request=sample_request).pages) + pages = list(client.list_network_endpoints(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2936,10 +5117,13 @@ def test_region_network_endpoint_groups_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( + "attach_network_endpoints", "delete", + "detach_network_endpoints", "get", "insert", "list", + "list_network_endpoints", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3079,9 +5263,15 @@ def test_region_network_endpoint_groups_client_transport_session_collision( credentials=creds2, transport=transport_name, ) + session1 = client1.transport.attach_network_endpoints._session + session2 = client2.transport.attach_network_endpoints._session + assert session1 != session2 session1 = client1.transport.delete._session session2 = client2.transport.delete._session assert session1 != session2 + session1 = client1.transport.detach_network_endpoints._session + session2 = client2.transport.detach_network_endpoints._session + assert session1 != session2 session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 @@ -3091,6 +5281,9 @@ def test_region_network_endpoint_groups_client_transport_session_collision( session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.list_network_endpoints._session + session2 = client2.transport.list_network_endpoints._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py index 9477af686f5f..842a8445aaee 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -591,11 +591,11 @@ def test_region_security_policies_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - compute.DeleteRegionSecurityPolicyRequest, + compute.AddRuleRegionSecurityPolicyRequest, dict, ], ) -def test_delete_rest(request_type): +def test_add_rule_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -607,6 +607,157 @@ def test_delete_rest(request_type): "region": "sample2", "security_policy": "sample3", } + request_init["security_policy_rule_resource"] = { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", 
"src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleRegionSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -646,7 +797,7 @@ def test_delete_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.add_rule(request) # Establish that the response is the type that we expect. 
assert isinstance(response, extended_operation.ExtendedOperation) @@ -674,8 +825,8 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" -def test_delete_rest_required_fields( - request_type=compute.DeleteRegionSecurityPolicyRequest, +def test_add_rule_rest_required_fields( + request_type=compute.AddRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport @@ -697,7 +848,7 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).add_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -708,9 +859,9 @@ def test_delete_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).add_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -740,9 +891,10 @@ def test_delete_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -755,33 +907,34 @@ def test_delete_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete(request) + response = client.add_rule(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_rest_unset_required_fields(): +def test_add_rule_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.add_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(("validateOnly",)) & set( ( "project", "region", "securityPolicy", + "securityPolicyRuleResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_rest_interceptors(null_interceptor): +def test_add_rule_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -794,14 +947,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_delete" + transports.RegionSecurityPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_delete" + transports.RegionSecurityPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeleteRegionSecurityPolicyRequest.pb( - compute.DeleteRegionSecurityPolicyRequest() + pb_message = compute.AddRuleRegionSecurityPolicyRequest.pb( + compute.AddRuleRegionSecurityPolicyRequest() ) transcode.return_value = { 
"method": "post", @@ -815,7 +968,7 @@ def test_delete_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeleteRegionSecurityPolicyRequest() + request = compute.AddRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -823,7 +976,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete( + client.add_rule( request, metadata=[ ("key", "val"), @@ -835,8 +988,8 @@ def test_delete_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_rest_bad_request( - transport: str = "rest", request_type=compute.DeleteRegionSecurityPolicyRequest +def test_add_rule_rest_bad_request( + transport: str = "rest", request_type=compute.AddRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -860,10 +1013,10 @@ def test_delete_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete(request) + client.add_rule(request) -def test_delete_rest_flattened(): +def test_add_rule_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -886,6 +1039,9 @@ def test_delete_rest_flattened(): project="project_value", region="region_value", security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) mock_args.update(sample_request) @@ -898,20 +1054,20 @@ def test_delete_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete(**mock_args) + client.add_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/addRule" % client.transport._host, args[1], ) -def test_delete_rest_flattened_error(transport: str = "rest"): +def test_add_rule_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -920,15 +1076,18 @@ def test_delete_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete( - compute.DeleteRegionSecurityPolicyRequest(), + client.add_rule( + compute.AddRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) -def test_delete_rest_error(): +def test_add_rule_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -937,11 +1096,11 @@ def test_delete_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.DeleteRegionSecurityPolicyRequest, + compute.AddRuleRegionSecurityPolicyRequest, dict, ], ) -def test_delete_unary_rest(request_type): +def test_add_rule_unary_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -953,6 +1112,157 @@ def test_delete_unary_rest(request_type): "region": "sample2", "security_policy": "sample3", } + request_init["security_policy_rule_resource"] = { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddRuleRegionSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -992,14 +1302,14 @@ def test_delete_unary_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.add_rule_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_delete_unary_rest_required_fields( - request_type=compute.DeleteRegionSecurityPolicyRequest, +def test_add_rule_unary_rest_required_fields( + request_type=compute.AddRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport @@ -1021,7 +1331,7 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).add_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1032,9 +1342,9 @@ def test_delete_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete._get_unset_required_fields(jsonified_request) + ).add_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1064,9 +1374,10 @@ def test_delete_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -1079,33 +1390,34 @@ def test_delete_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_unary(request) + response = client.add_rule_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_unary_rest_unset_required_fields(): +def test_add_rule_unary_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete._get_unset_required_fields({}) + unset_fields = transport.add_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(("validateOnly",)) & set( ( "project", "region", "securityPolicy", + "securityPolicyRuleResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_unary_rest_interceptors(null_interceptor): +def test_add_rule_unary_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1118,14 +1430,14 @@ def test_delete_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_delete" + transports.RegionSecurityPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_delete" + transports.RegionSecurityPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.DeleteRegionSecurityPolicyRequest.pb( - compute.DeleteRegionSecurityPolicyRequest() + pb_message = compute.AddRuleRegionSecurityPolicyRequest.pb( + compute.AddRuleRegionSecurityPolicyRequest() ) transcode.return_value = { "method": "post", @@ -1139,7 
+1451,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.DeleteRegionSecurityPolicyRequest() + request = compute.AddRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1147,7 +1459,7 @@ def test_delete_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.delete_unary( + client.add_rule_unary( request, metadata=[ ("key", "val"), @@ -1159,8 +1471,8 @@ def test_delete_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_unary_rest_bad_request( - transport: str = "rest", request_type=compute.DeleteRegionSecurityPolicyRequest +def test_add_rule_unary_rest_bad_request( + transport: str = "rest", request_type=compute.AddRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1184,10 +1496,10 @@ def test_delete_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_unary(request) + client.add_rule_unary(request) -def test_delete_unary_rest_flattened(): +def test_add_rule_unary_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1210,6 +1522,9 @@ def test_delete_unary_rest_flattened(): project="project_value", region="region_value", security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) mock_args.update(sample_request) @@ -1222,20 +1537,20 @@ def test_delete_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_unary(**mock_args) + client.add_rule_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/addRule" % client.transport._host, args[1], ) -def test_delete_unary_rest_flattened_error(transport: str = "rest"): +def test_add_rule_unary_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1244,15 +1559,18 @@ def test_delete_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
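# --- Editor's note (hedged sketch, not part of the generated patch) ---------
# The interceptor test above patches `pre_add_rule` / `post_add_rule` on
# RegionSecurityPoliciesRestInterceptor. In application code the same hooks
# can be supplied by subclassing the interceptor and handing it to the REST
# transport; the hook shapes below mirror what the mocks return, namely a
# (request, metadata) pair and the post-processed response.
from google.cloud.compute_v1.services.region_security_policies import (
    RegionSecurityPoliciesClient,
    transports,
)


class LoggingInterceptor(transports.RegionSecurityPoliciesRestInterceptor):
    def pre_add_rule(self, request, metadata):
        # Inspect or amend the outgoing request/metadata before transcoding.
        return request, metadata

    def post_add_rule(self, response):
        # Inspect the compute.Operation before it is handed back to the caller.
        return response


# Credentials resolve via Application Default Credentials in this sketch.
transport = transports.RegionSecurityPoliciesRestTransport(
    interceptor=LoggingInterceptor()
)
client = RegionSecurityPoliciesClient(transport=transport)
# -----------------------------------------------------------------------------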
with pytest.raises(ValueError): - client.delete_unary( - compute.DeleteRegionSecurityPolicyRequest(), + client.add_rule_unary( + compute.AddRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) -def test_delete_unary_rest_error(): +def test_add_rule_unary_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1261,11 +1579,11 @@ def test_delete_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.GetRegionSecurityPolicyRequest, + compute.DeleteRegionSecurityPolicyRequest, dict, ], ) -def test_get_rest(request_type): +def test_delete_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1282,45 +1600,71 @@ def test_get_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.SecurityPolicy( + return_value = compute.Operation( + client_operation_id="client_operation_id_value", creation_timestamp="creation_timestamp_value", description="description_value", - fingerprint="fingerprint_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, id=205, + insert_time="insert_time_value", kind="kind_value", - label_fingerprint="label_fingerprint_value", name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, region="region_value", self_link="self_link_value", - type_="type__value", - ) - - # Wrap the value into a proper Response obj + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, compute.SecurityPolicy) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.fingerprint == "fingerprint_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 assert response.id == 205 + assert response.insert_time == "insert_time_value" assert response.kind == "kind_value" - assert response.label_fingerprint == "label_fingerprint_value" assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.type_ == "type__value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRequest): +def test_delete_rest_required_fields( + request_type=compute.DeleteRegionSecurityPolicyRequest, +): transport_class = transports.RegionSecurityPoliciesRestTransport request_init = {} @@ -1341,7 +1685,7 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1352,7 +1696,9 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1370,7 +1716,7 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.SecurityPolicy() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. 
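# --- Editor's note (hedged usage sketch; project, region, and policy names ---
# are hypothetical). The assertions above show that `delete` surfaces the
# long-running operation as a google.api_core extended_operation.ExtendedOperation
# wrapping compute.Operation, while `delete_unary` (tested further down) returns
# the raw Operation snapshot without a polling helper. A typical caller of the
# wrapped form:
from google.cloud import compute_v1

client = compute_v1.RegionSecurityPoliciesClient()
operation = client.delete(
    project="my-project",
    region="us-central1",
    security_policy="my-policy",
)
operation.result()  # blocks until the operation reaches Operation.Status.DONE
if operation.error_code:  # populated only when the operation failed
    print(operation.error_code, operation.error_message)
# -----------------------------------------------------------------------------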
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1382,7 +1728,7 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -1391,27 +1737,27 @@ def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get(request) + response = client.delete(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_rest_unset_required_fields(): +def test_delete_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("requestId",)) & set( ( "project", @@ -1423,7 +1769,7 @@ def test_get_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_rest_interceptors(null_interceptor): +def test_delete_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1436,14 +1782,14 @@ def test_get_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_get" + transports.RegionSecurityPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_get" + transports.RegionSecurityPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.GetRegionSecurityPolicyRequest.pb( - compute.GetRegionSecurityPolicyRequest() + pb_message = compute.DeleteRegionSecurityPolicyRequest.pb( + compute.DeleteRegionSecurityPolicyRequest() ) transcode.return_value = { "method": "post", @@ -1455,19 +1801,17 @@ def test_get_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.SecurityPolicy.to_json( - compute.SecurityPolicy() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.GetRegionSecurityPolicyRequest() + request = compute.DeleteRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.SecurityPolicy() + post.return_value = compute.Operation() - client.get( + client.delete( request, metadata=[ ("key", "val"), @@ -1479,8 +1823,8 @@ def test_get_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_rest_bad_request( - transport: str = "rest", request_type=compute.GetRegionSecurityPolicyRequest +def test_delete_rest_bad_request( + transport: str = "rest", 
request_type=compute.DeleteRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1504,10 +1848,10 @@ def test_get_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get(request) + client.delete(request) -def test_get_rest_flattened(): +def test_delete_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1516,7 +1860,7 @@ def test_get_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.SecurityPolicy() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method sample_request = { @@ -1537,12 +1881,12 @@ def test_get_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicy.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get(**mock_args) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -1555,7 +1899,7 @@ def test_get_rest_flattened(): ) -def test_get_rest_flattened_error(transport: str = "rest"): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1564,15 +1908,15 @@ def test_get_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get( - compute.GetRegionSecurityPolicyRequest(), + client.delete( + compute.DeleteRegionSecurityPolicyRequest(), project="project_value", region="region_value", security_policy="security_policy_value", ) -def test_get_rest_error(): +def test_delete_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1581,261 +1925,76 @@ def test_get_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertRegionSecurityPolicyRequest, + compute.DeleteRegionSecurityPolicyRequest, dict, ], ) -def test_insert_rest(request_type): +def test_delete_unary_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", } - # The version of a generated dependency at test runtime 
may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ - "security_policy_resource" - ] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_policy_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = 
subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["security_policy_resource"][field])): - del request_init["security_policy_resource"][field][i][subfield] - else: - del request_init["security_policy_resource"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", - kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", - self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.insert(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_unary(request) # Establish that the response is the type that we expect. 
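# --- Editor's note (hedged sketch; resource values are hypothetical). The ----
# insert tests above build a fully populated SecurityPolicy body; in
# application code the same call usually passes a much smaller resource via the
# flattened `security_policy_resource` argument and waits on the returned
# operation:
from google.cloud import compute_v1

client = compute_v1.RegionSecurityPoliciesClient()
policy = compute_v1.SecurityPolicy(
    name="my-regional-policy",
    description="regional security policy",
)
operation = client.insert(
    project="my-project",
    region="us-central1",
    security_policy_resource=policy,
)
operation.result()  # insert() also returns an ExtendedOperation
# -----------------------------------------------------------------------------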
- assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" - assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" - assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" + assert isinstance(response, compute.Operation) -def test_insert_rest_required_fields( - request_type=compute.InsertRegionSecurityPolicyRequest, +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport request_init = {} request_init["project"] = "" request_init["region"] = "" + request_init["security_policy"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1850,24 +2009,20 @@ def test_insert_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["project"] = "project_value" jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).delete._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1875,6 +2030,8 @@ def test_insert_rest_required_fields( assert jsonified_request["project"] == "project_value" assert "region" in jsonified_request assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1895,10 +2052,9 @@ def test_insert_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -1911,38 +2067,33 @@ def test_insert_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert(request) + response = client.delete_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_rest_unset_required_fields(): +def test_delete_unary_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.delete._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "requestId", - "validateOnly", - ) - ) + set(("requestId",)) & set( ( "project", "region", - "securityPolicyResource", + "securityPolicy", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_rest_interceptors(null_interceptor): +def test_delete_unary_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1955,14 +2106,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_insert" + transports.RegionSecurityPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_insert" + transports.RegionSecurityPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertRegionSecurityPolicyRequest.pb( - compute.InsertRegionSecurityPolicyRequest() + pb_message = compute.DeleteRegionSecurityPolicyRequest.pb( + compute.DeleteRegionSecurityPolicyRequest() ) transcode.return_value = { "method": "post", @@ -1976,7 +2127,7 @@ def test_insert_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.InsertRegionSecurityPolicyRequest() + request = compute.DeleteRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1984,7 +2135,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.insert( + client.delete_unary( request, metadata=[ ("key", "val"), @@ -1996,8 +2147,8 @@ def 
test_insert_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_rest_bad_request( - transport: str = "rest", request_type=compute.InsertRegionSecurityPolicyRequest +def test_delete_unary_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2005,7 +2156,11 @@ def test_insert_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2017,10 +2172,10 @@ def test_insert_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert(request) + client.delete_unary(request) -def test_insert_rest_flattened(): +def test_delete_unary_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2032,19 +2187,17 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), + security_policy="security_policy_value", ) mock_args.update(sample_request) @@ -2057,20 +2210,20 @@ def test_insert_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert(**mock_args) + client.delete_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1], ) -def test_insert_rest_flattened_error(transport: str = "rest"): +def test_delete_unary_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2079,21 +2232,15 @@ def test_insert_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.insert( - compute.InsertRegionSecurityPolicyRequest(), + client.delete_unary( + compute.DeleteRegionSecurityPolicyRequest(), project="project_value", region="region_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), + security_policy="security_policy_value", ) -def test_insert_rest_error(): +def test_delete_unary_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2102,52 +2249,2754 @@ def test_insert_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.InsertRegionSecurityPolicyRequest, + compute.GetRegionSecurityPolicyRequest, dict, ], ) -def test_insert_unary_rest(request_type): +def test_get_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy( + creation_timestamp="creation_timestamp_value", + description="description_value", + fingerprint="fingerprint_value", + id=205, + kind="kind_value", + label_fingerprint="label_fingerprint_value", + name="name_value", + region="region_value", + self_link="self_link_value", + type_="type__value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SecurityPolicy) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.fingerprint == "fingerprint_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.label_fingerprint == "label_fingerprint_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.type_ == "type__value" + + +def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "securityPolicy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionSecurityPolicyRequest.pb( + compute.GetRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicy.to_json( + compute.SecurityPolicy() + ) + + request = compute.GetRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicy() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
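# --- Editor's note (hedged sketch; identifiers are hypothetical). The get ----
# tests above exercise a plain read: no request body, the three path parameters
# as the only required fields, and a compute.SecurityPolicy payload in the
# response. A typical caller:
from google.cloud import compute_v1

client = compute_v1.RegionSecurityPoliciesClient()
policy = client.get(
    project="my-project",
    region="us-central1",
    security_policy="my-policy",
)
print(policy.name, policy.fingerprint)  # fields asserted on above
# -----------------------------------------------------------------------------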
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + +def test_get_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRuleRegionSecurityPolicyRequest, + dict, + ], +) +def test_get_rule_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicyRule( + action="action_value", + description="description_value", + kind="kind_value", + preview=True, + priority=898, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_rule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SecurityPolicyRule) + assert response.action == "action_value" + assert response.description == "description_value" + assert response.kind == "kind_value" + assert response.preview is True + assert response.priority == 898 + + +def test_get_rule_rest_required_fields( + request_type=compute.GetRuleRegionSecurityPolicyRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("priority",)) + & set( + ( + "project", + "region", + "securityPolicy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_get_rule" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_get_rule" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRuleRegionSecurityPolicyRequest.pb( + compute.GetRuleRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicyRule.to_json( + compute.SecurityPolicyRule() + ) + + request = compute.GetRuleRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicyRule() + + client.get_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_rest_bad_request( + transport: str = "rest", request_type=compute.GetRuleRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + +def test_get_rule_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/getRule" + % client.transport._host, + args[1], + ) + + +def test_get_rule_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
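# --- Editor's note (hedged sketch; identifiers are hypothetical). get_rule ----
# addresses a single rule inside a policy; the required-fields test above shows
# the rule is selected by the optional `priority` query parameter rather than a
# path segment, so passing a request object is the natural way to supply it:
from google.cloud import compute_v1

client = compute_v1.RegionSecurityPoliciesClient()
rule = client.get_rule(
    request=compute_v1.GetRuleRegionSecurityPolicyRequest(
        project="my-project",
        region="us-central1",
        security_policy="my-policy",
        priority=1000,
    )
)
print(rule.action, rule.priority)
# -----------------------------------------------------------------------------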
+ with pytest.raises(ValueError): + client.get_rule( + compute.GetRuleRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + +def test_get_rule_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionSecurityPolicyRequest, + dict, + ], +) +def test_insert_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["security_policy_resource"] = { + "adaptive_protection_config": { + "layer7_ddos_defense_config": { + "enable": True, + "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], + } + }, + "advanced_options_config": { + "json_custom_config": { + "content_types": ["content_types_value1", "content_types_value2"] + }, + "json_parsing": "json_parsing_value", + "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], + }, + "creation_timestamp": "creation_timestamp_value", + "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, + "region": "region_value", + "rules": [ + { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": [ + "src_ip_ranges_value1", + "src_ip_ranges_value2", + ] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + 
"ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + ], + "self_link": "self_link_value", + "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_insert_rest_required_fields( + request_type=compute.InsertRegionSecurityPolicyRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "validateOnly", + ) + ) + & set( + ( + "project", + "region", + "securityPolicyResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSecurityPolicyRequest.pb( + compute.InsertRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + + +def test_insert_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionSecurityPolicyRequest, + dict, + ], +) +def test_insert_unary_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["security_policy_resource"] = { + "adaptive_protection_config": { + "layer7_ddos_defense_config": { + "enable": True, + "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], + } + }, + "advanced_options_config": { + "json_custom_config": { + "content_types": ["content_types_value1", "content_types_value2"] + }, + "json_parsing": "json_parsing_value", + "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], + }, + "creation_timestamp": "creation_timestamp_value", + "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, + "region": "region_value", + "rules": [ + { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": [ + "src_ip_ranges_value1", + "src_ip_ranges_value2", + ] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + 
"target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + ], + "self_link": "self_link_value", + "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, 
len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertRegionSecurityPolicyRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "validateOnly", + ) + ) + & set( + ( + "project", + "region", + "securityPolicyResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSecurityPolicyRequest.pb( + compute.InsertRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + + +def test_insert_unary_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListRegionSecurityPoliciesRequest, + dict, + ], +) +def test_list_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
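+        # Note (editorial comment): the REST transport yields a raw SecurityPolicyList here;
+        # client.list() wraps it in a pagers.ListPager, which the assertions below rely on.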
+ return_value = compute.SecurityPolicyList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + + +def test_list_rest_required_fields( + request_type=compute.ListRegionSecurityPoliciesRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
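+            # Note (editorial comment): list is transcoded as an HTTP GET, so the fake
+            # transcode result below carries no 'body' entry.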
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionSecurityPoliciesRequest.pb( + compute.ListRegionSecurityPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicyList.to_json( + compute.SecurityPolicyList() + ) + + request = compute.ListRegionSecurityPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicyList() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionSecurityPoliciesRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionSecurityPoliciesRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
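+        # Note (editorial comment): the pager test below serializes several SecurityPolicyList
+        # pages, feeds them through req.side_effect, then checks item iteration and page tokens.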
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + next_page_token="abc", + ), + compute.SecurityPolicyList( + items=[], + next_page_token="def", + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + ], + next_page_token="ghi", + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SecurityPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SecurityPolicy) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionSecurityPolicyRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request_init["security_policy_resource"] = { + "adaptive_protection_config": { + "layer7_ddos_defense_config": { + "enable": True, + "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], + } + }, + "advanced_options_config": { + "json_custom_config": { + "content_types": ["content_types_value1", "content_types_value2"] + }, + "json_parsing": "json_parsing_value", + "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], + }, + "creation_timestamp": "creation_timestamp_value", + "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, + "region": "region_value", + "rules": [ + { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": [ + "src_ip_ranges_value1", + "src_ip_ranges_value2", + ] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": 
"versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + ], + "self_link": "self_link_value", + "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
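+        # Note (editorial comment): patch is a long-running Compute operation; the client
+        # returns an extended_operation.ExtendedOperation, as the assertions below verify.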
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields( + request_type=compute.PatchRegionSecurityPolicyRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "region", + "securityPolicy", + "securityPolicyResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSecurityPolicyRequest.pb( + compute.PatchRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy="security_policy_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy="security_policy_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + + +def test_patch_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionSecurityPolicyRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request_init["security_policy_resource"] = { + "adaptive_protection_config": { + "layer7_ddos_defense_config": { + "enable": True, + "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], + } + }, + "advanced_options_config": { + "json_custom_config": { + "content_types": ["content_types_value1", "content_types_value2"] + }, + "json_parsing": "json_parsing_value", + "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], + }, + "creation_timestamp": "creation_timestamp_value", + "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, + "description": "description_value", + "fingerprint": "fingerprint_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, + "region": "region_value", + "rules": [ + { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", } ] }, @@ -2167,6 +5016,27 @@ def test_insert_unary_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -2198,26 +5068,532 @@ def test_insert_unary_rest(request_type): } ], "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": 
"type__value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, + } + ], + "self_link": "self_link_value", + "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ + "security_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_policy_resource"][field])): + del request_init["security_policy_resource"][field][i][subfield] + else: + del request_init["security_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchRegionSecurityPolicyRequest, +): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "region", + "securityPolicy", + "securityPolicyResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSecurityPolicyRequest.pb( + compute.PatchRegionSecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionSecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionSecurityPolicyRequest +): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + security_policy="security_policy_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionSecurityPolicyRequest(), + project="project_value", + region="region_value", + security_policy="security_policy_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), + ) + + +def test_patch_unary_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRuleRegionSecurityPolicyRequest, + dict, + ], +) +def test_patch_rule_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request_init["security_policy_rule_resource"] = { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + "redirect_options": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = compute.InsertRegionSecurityPolicyRequest.meta.fields[ - "security_policy_resource" + test_field = compute.PatchRuleRegionSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" ] def get_message_fields(field): @@ -2247,7 +5623,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime for field, value in request_init[ - "security_policy_resource" + "security_policy_rule_resource" ].items(): # pragma: NO COVER result = None is_repeated = False @@ -2278,10 +5654,14 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["security_policy_resource"][field])): - del request_init["security_policy_resource"][field][i][subfield] + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] else: - del request_init["security_policy_resource"][field][subfield] + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2321,20 +5701,43 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.patch_rule(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, compute.Operation) + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" -def test_insert_unary_rest_required_fields( - request_type=compute.InsertRegionSecurityPolicyRequest, +def test_patch_rule_rest_required_fields( + request_type=compute.PatchRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport request_init = {} request_init["project"] = "" request_init["region"] = "" + request_init["security_policy"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -2349,21 +5752,23 @@ def test_insert_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).patch_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["project"] = "project_value" jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).insert._get_unset_required_fields(jsonified_request) + ).patch_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "request_id", + "priority", + "update_mask", "validate_only", ) ) @@ -2374,6 +5779,8 @@ def test_insert_unary_rest_required_fields( assert jsonified_request["project"] == "project_value" assert "region" in jsonified_request assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2410,23 +5817,24 @@ def test_insert_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.insert_unary(request) + response = client.patch_rule(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_insert_unary_rest_unset_required_fields(): +def test_patch_rule_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.insert._get_unset_required_fields({}) + unset_fields = transport.patch_rule._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", + "priority", + "updateMask", "validateOnly", ) ) @@ -2434,14 +5842,15 @@ def test_insert_unary_rest_unset_required_fields(): ( "project", "region", - "securityPolicyResource", + "securityPolicy", + "securityPolicyRuleResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_insert_unary_rest_interceptors(null_interceptor): +def test_patch_rule_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2454,14 +5863,14 @@ def test_insert_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_insert" + transports.RegionSecurityPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_insert" + transports.RegionSecurityPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.InsertRegionSecurityPolicyRequest.pb( - compute.InsertRegionSecurityPolicyRequest() + pb_message = compute.PatchRuleRegionSecurityPolicyRequest.pb( + compute.PatchRuleRegionSecurityPolicyRequest() ) transcode.return_value = { "method": "post", @@ -2475,7 +5884,7 @@ def test_insert_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.InsertRegionSecurityPolicyRequest() + request = compute.PatchRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2483,7 +5892,7 @@ def test_insert_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.insert_unary( + client.patch_rule( request, metadata=[ ("key", "val"), @@ -2495,8 +5904,8 @@ def test_insert_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_insert_unary_rest_bad_request( - transport: str = "rest", request_type=compute.InsertRegionSecurityPolicyRequest +def test_patch_rule_rest_bad_request( + transport: str = "rest", 
request_type=compute.PatchRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2504,7 +5913,11 @@ def test_insert_unary_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2516,10 +5929,10 @@ def test_insert_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.insert_unary(request) + client.patch_rule(request) -def test_insert_unary_rest_flattened(): +def test_patch_rule_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2531,18 +5944,19 @@ def test_insert_unary_rest_flattened(): return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) + security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" ), ) mock_args.update(sample_request) @@ -2556,20 +5970,20 @@ def test_insert_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.insert_unary(**mock_args) + client.patch_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/patchRule" % client.transport._host, args[1], ) -def test_insert_unary_rest_flattened_error(transport: str = "rest"): +def test_patch_rule_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2578,21 +5992,18 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.insert_unary( - compute.InsertRegionSecurityPolicyRequest(), + client.patch_rule( + compute.PatchRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) + security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" ), ) -def test_insert_unary_rest_error(): +def test_patch_rule_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2601,55 +6012,227 @@ def test_insert_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.ListRegionSecurityPoliciesRequest, + compute.PatchRuleRegionSecurityPolicyRequest, dict, ], ) -def test_list_rest(request_type): +def test_patch_rule_unary_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } + request_init["security_policy_rule_resource"] = { + "action": "action_value", + "description": "description_value", + "header_action": { + "request_headers_to_adds": [ + { + "header_name": "header_name_value", + "header_value": "header_value_value", + } + ] + }, + "kind": "kind_value", + "match": { + "config": { + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"] + }, + "expr": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "versioned_expr": "versioned_expr_value", + }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, + "preconfigured_waf_config": { + "exclusions": [ + { + "request_cookies_to_exclude": [ + {"op": "op_value", "val": "val_value"} + ], + "request_headers_to_exclude": {}, + "request_query_params_to_exclude": {}, + "request_uris_to_exclude": {}, + "target_rule_ids": [ + "target_rule_ids_value1", + "target_rule_ids_value2", + ], + "target_rule_set": "target_rule_set_value", + } + ] + }, + "preview": True, + "priority": 898, + "rate_limit_options": { + "ban_duration_sec": 1680, + "ban_threshold": {"count": 553, "interval_sec": 1279}, + "conform_action": "conform_action_value", + "enforce_on_key": "enforce_on_key_value", + "enforce_on_key_configs": [ + { + "enforce_on_key_name": "enforce_on_key_name_value", + "enforce_on_key_type": "enforce_on_key_type_value", + } + ], + "enforce_on_key_name": "enforce_on_key_name_value", + "exceed_action": "exceed_action_value", + "exceed_redirect_options": { + "target": "target_value", + "type_": "type__value", + }, + "rate_limit_threshold": {}, + }, + 
"redirect_options": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRuleRegionSecurityPolicyRequest.meta.fields[ + "security_policy_rule_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_rule_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_rule_resource"][field]) + ): + del request_init["security_policy_rule_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_rule_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.SecurityPolicyList( - id="id_value", + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", kind="kind_value", - next_page_token="next_page_token_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicyList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.patch_rule_unary(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPager) - assert response.id == "id_value" - assert response.kind == "kind_value" - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, compute.Operation) -def test_list_rest_required_fields( - request_type=compute.ListRegionSecurityPoliciesRequest, +def test_patch_rule_unary_rest_required_fields( + request_type=compute.PatchRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport request_init = {} request_init["project"] = "" request_init["region"] = "" + request_init["security_policy"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -2664,25 +6247,24 @@ def test_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).patch_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["project"] = "project_value" jsonified_request["region"] = "region_value" + jsonified_request["securityPolicy"] = "security_policy_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).patch_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "max_results", - "order_by", - "page_token", - "return_partial_success", + "priority", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -2692,6 +6274,8 @@ def test_list_rest_required_fields( assert jsonified_request["project"] == "project_value" assert "region" in jsonified_request assert jsonified_request["region"] == "region_value" + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == "security_policy_value" client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2700,7 +6284,7 @@ def test_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.SecurityPolicyList() + return_value = compute.Operation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2712,55 +6296,56 @@ def test_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicyList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list(request) + response = client.patch_rule_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rest_unset_required_fields(): +def test_patch_rule_unary_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list._get_unset_required_fields({}) + unset_fields = transport.patch_rule._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "maxResults", - "orderBy", - "pageToken", - "returnPartialSuccess", + "priority", + "updateMask", + "validateOnly", ) ) & set( ( "project", "region", + "securityPolicy", + "securityPolicyRuleResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_rest_interceptors(null_interceptor): +def test_patch_rule_unary_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2773,14 +6358,14 @@ def test_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_list" + transports.RegionSecurityPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_list" + transports.RegionSecurityPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.ListRegionSecurityPoliciesRequest.pb( - compute.ListRegionSecurityPoliciesRequest() + pb_message = compute.PatchRuleRegionSecurityPolicyRequest.pb( + compute.PatchRuleRegionSecurityPolicyRequest() ) 
transcode.return_value = { "method": "post", @@ -2792,19 +6377,17 @@ def test_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = compute.SecurityPolicyList.to_json( - compute.SecurityPolicyList() - ) + req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.ListRegionSecurityPoliciesRequest() + request = compute.PatchRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.SecurityPolicyList() + post.return_value = compute.Operation() - client.list( + client.patch_rule_unary( request, metadata=[ ("key", "val"), @@ -2816,8 +6399,8 @@ def test_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_rest_bad_request( - transport: str = "rest", request_type=compute.ListRegionSecurityPoliciesRequest +def test_patch_rule_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2825,7 +6408,11 @@ def test_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2"} + request_init = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2837,10 +6424,10 @@ def test_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list(request) + client.patch_rule_unary(request) -def test_list_rest_flattened(): +def test_patch_rule_unary_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2849,15 +6436,23 @@ def test_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.SecurityPolicyList() + return_value = compute.Operation() # get arguments that satisfy an http rule for this method - sample_request = {"project": "sample1", "region": "sample2"} + sample_request = { + "project": "sample1", + "region": "sample2", + "security_policy": "sample3", + } # get truthy value for each flattened field mock_args = dict( project="project_value", region="region_value", + security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) mock_args.update(sample_request) @@ -2865,25 +6460,25 @@ def test_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.SecurityPolicyList.pb(return_value) + return_value = compute.Operation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list(**mock_args) + client.patch_rule_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/patchRule" % client.transport._host, args[1], ) -def test_list_rest_flattened_error(transport: str = "rest"): +def test_patch_rule_unary_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2892,82 +6487,31 @@ def test_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list( - compute.ListRegionSecurityPoliciesRequest(), + client.patch_rule_unary( + compute.PatchRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", + security_policy="security_policy_value", + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) -def test_list_rest_pager(transport: str = "rest"): +def test_patch_rule_unary_rest_error(): client = RegionSecurityPoliciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - compute.SecurityPolicyList( - items=[ - compute.SecurityPolicy(), - compute.SecurityPolicy(), - compute.SecurityPolicy(), - ], - next_page_token="abc", - ), - compute.SecurityPolicyList( - items=[], - next_page_token="def", - ), - compute.SecurityPolicyList( - items=[ - compute.SecurityPolicy(), - ], - next_page_token="ghi", - ), - compute.SecurityPolicyList( - items=[ - compute.SecurityPolicy(), - compute.SecurityPolicy(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(compute.SecurityPolicyList.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"project": "sample1", "region": "sample2"} - - pager = client.list(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, compute.SecurityPolicy) for i in results) - - pages = list(client.list(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - compute.PatchRegionSecurityPolicyRequest, + compute.RemoveRuleRegionSecurityPolicyRequest, dict, ], ) -def test_patch_rest(request_type): +def test_remove_rule_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2979,174 +6523,6 @@ def test_patch_rest(request_type): "region": "sample2", "security_policy": "sample3", } - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - 
"rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": "type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ - "security_policy_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_policy_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["security_policy_resource"][field])): - del request_init["security_policy_resource"][field][i][subfield] - else: - del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3186,7 +6562,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.remove_rule(request) # Establish that the response is the type that we expect. 
assert isinstance(response, extended_operation.ExtendedOperation) @@ -3214,8 +6590,8 @@ def get_message_fields(field): assert response.zone == "zone_value" -def test_patch_rest_required_fields( - request_type=compute.PatchRegionSecurityPolicyRequest, +def test_remove_rule_rest_required_fields( + request_type=compute.RemoveRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport @@ -3237,7 +6613,7 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).remove_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3248,9 +6624,9 @@ def test_patch_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).remove_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set(("priority",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3280,10 +6656,9 @@ def test_patch_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3296,34 +6671,33 @@ def test_patch_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch(request) + response = client.remove_rule(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_rest_unset_required_fields(): +def test_remove_rule_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.remove_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(("priority",)) & set( ( "project", "region", "securityPolicy", - "securityPolicyResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_rest_interceptors(null_interceptor): +def test_remove_rule_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3336,14 +6710,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_patch" + transports.RegionSecurityPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_patch" + transports.RegionSecurityPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchRegionSecurityPolicyRequest.pb( - compute.PatchRegionSecurityPolicyRequest() + pb_message = compute.RemoveRuleRegionSecurityPolicyRequest.pb( + compute.RemoveRuleRegionSecurityPolicyRequest() ) transcode.return_value 
= { "method": "post", @@ -3357,7 +6731,7 @@ def test_patch_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchRegionSecurityPolicyRequest() + request = compute.RemoveRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3365,7 +6739,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch( + client.remove_rule( request, metadata=[ ("key", "val"), @@ -3377,8 +6751,8 @@ def test_patch_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_rest_bad_request( - transport: str = "rest", request_type=compute.PatchRegionSecurityPolicyRequest +def test_remove_rule_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3402,10 +6776,10 @@ def test_patch_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch(request) + client.remove_rule(request) -def test_patch_rest_flattened(): +def test_remove_rule_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3428,13 +6802,6 @@ def test_patch_rest_flattened(): project="project_value", region="region_value", security_policy="security_policy_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), ) mock_args.update(sample_request) @@ -3447,20 +6814,20 @@ def test_patch_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch(**mock_args) + client.remove_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/removeRule" % client.transport._host, args[1], ) -def test_patch_rest_flattened_error(transport: str = "rest"): +def test_remove_rule_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3469,22 +6836,15 @@ def test_patch_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.patch( - compute.PatchRegionSecurityPolicyRequest(), + client.remove_rule( + compute.RemoveRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", security_policy="security_policy_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), ) -def test_patch_rest_error(): +def test_remove_rule_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3493,11 +6853,11 @@ def test_patch_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.PatchRegionSecurityPolicyRequest, + compute.RemoveRuleRegionSecurityPolicyRequest, dict, ], ) -def test_patch_unary_rest(request_type): +def test_remove_rule_unary_rest(request_type): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3509,174 +6869,6 @@ def test_patch_unary_rest(request_type): "region": "sample2", "security_policy": "sample3", } - request_init["security_policy_resource"] = { - "adaptive_protection_config": { - "layer7_ddos_defense_config": { - "enable": True, - "rule_visibility": "rule_visibility_value", - } - }, - "advanced_options_config": { - "json_custom_config": { - "content_types": ["content_types_value1", "content_types_value2"] - }, - "json_parsing": "json_parsing_value", - "log_level": "log_level_value", - }, - "creation_timestamp": "creation_timestamp_value", - "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, - "description": "description_value", - "fingerprint": "fingerprint_value", - "id": 205, - "kind": "kind_value", - "label_fingerprint": "label_fingerprint_value", - "labels": {}, - "name": "name_value", - "recaptcha_options_config": {"redirect_site_key": "redirect_site_key_value"}, - "region": "region_value", - "rules": [ - { - "action": "action_value", - "description": "description_value", - "header_action": { - "request_headers_to_adds": [ - { - "header_name": "header_name_value", - "header_value": "header_value_value", - } - ] - }, - "kind": "kind_value", - "match": { - "config": { - "src_ip_ranges": [ - "src_ip_ranges_value1", - "src_ip_ranges_value2", - ] - }, - "expr": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "versioned_expr": "versioned_expr_value", - }, - "preconfigured_waf_config": { - "exclusions": [ - { - "request_cookies_to_exclude": [ - {"op": "op_value", "val": "val_value"} - ], - "request_headers_to_exclude": {}, - "request_query_params_to_exclude": {}, - "request_uris_to_exclude": {}, - "target_rule_ids": [ - "target_rule_ids_value1", - "target_rule_ids_value2", - ], - "target_rule_set": "target_rule_set_value", - } - ] - }, - "preview": True, - "priority": 898, - "rate_limit_options": { - "ban_duration_sec": 1680, - "ban_threshold": {"count": 553, "interval_sec": 1279}, - "conform_action": "conform_action_value", - "enforce_on_key": "enforce_on_key_value", - "enforce_on_key_configs": [ - { - "enforce_on_key_name": "enforce_on_key_name_value", - "enforce_on_key_type": "enforce_on_key_type_value", - } - ], - "enforce_on_key_name": "enforce_on_key_name_value", - "exceed_action": "exceed_action_value", - "exceed_redirect_options": { - "target": "target_value", - "type_": 
"type__value", - }, - "rate_limit_threshold": {}, - }, - "redirect_options": {}, - } - ], - "self_link": "self_link_value", - "type_": "type__value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = compute.PatchRegionSecurityPolicyRequest.meta.fields[ - "security_policy_resource" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "security_policy_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["security_policy_resource"][field])): - del request_init["security_policy_resource"][field][i][subfield] - else: - del request_init["security_policy_resource"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3716,14 +6908,14 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.remove_rule_unary(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Operation) -def test_patch_unary_rest_required_fields( - request_type=compute.PatchRegionSecurityPolicyRequest, +def test_remove_rule_unary_rest_required_fields( + request_type=compute.RemoveRuleRegionSecurityPolicyRequest, ): transport_class = transports.RegionSecurityPoliciesRestTransport @@ -3745,7 +6937,7 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).remove_rule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3756,9 +6948,9 @@ def test_patch_unary_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).patch._get_unset_required_fields(jsonified_request) + ).remove_rule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set(("priority",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3788,10 +6980,9 @@ def test_patch_unary_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3804,34 +6995,33 @@ def test_patch_unary_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.patch_unary(request) + response = client.remove_rule_unary(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_patch_unary_rest_unset_required_fields(): +def test_remove_rule_unary_rest_unset_required_fields(): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.patch._get_unset_required_fields({}) + unset_fields = transport.remove_rule._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set(("priority",)) & set( ( "project", "region", "securityPolicy", - "securityPolicyResource", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_patch_unary_rest_interceptors(null_interceptor): +def test_remove_rule_unary_rest_interceptors(null_interceptor): transport = transports.RegionSecurityPoliciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3844,14 +7034,14 @@ def test_patch_unary_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "post_patch" + transports.RegionSecurityPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( - transports.RegionSecurityPoliciesRestInterceptor, "pre_patch" + transports.RegionSecurityPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = compute.PatchRegionSecurityPolicyRequest.pb( - compute.PatchRegionSecurityPolicyRequest() + pb_message = compute.RemoveRuleRegionSecurityPolicyRequest.pb( + compute.RemoveRuleRegionSecurityPolicyRequest() ) transcode.return_value = { "method": "post", @@ 
-3865,7 +7055,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = compute.Operation.to_json(compute.Operation()) - request = compute.PatchRegionSecurityPolicyRequest() + request = compute.RemoveRuleRegionSecurityPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3873,7 +7063,7 @@ def test_patch_unary_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = compute.Operation() - client.patch_unary( + client.remove_rule_unary( request, metadata=[ ("key", "val"), @@ -3885,8 +7075,8 @@ def test_patch_unary_rest_interceptors(null_interceptor): post.assert_called_once() -def test_patch_unary_rest_bad_request( - transport: str = "rest", request_type=compute.PatchRegionSecurityPolicyRequest +def test_remove_rule_unary_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveRuleRegionSecurityPolicyRequest ): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3910,10 +7100,10 @@ def test_patch_unary_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.patch_unary(request) + client.remove_rule_unary(request) -def test_patch_unary_rest_flattened(): +def test_remove_rule_unary_rest_flattened(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3936,13 +7126,6 @@ def test_patch_unary_rest_flattened(): project="project_value", region="region_value", security_policy="security_policy_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), ) mock_args.update(sample_request) @@ -3955,20 +7138,20 @@ def test_patch_unary_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.patch_unary(**mock_args) + client.remove_rule_unary(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" + "%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}/removeRule" % client.transport._host, args[1], ) -def test_patch_unary_rest_flattened_error(transport: str = "rest"): +def test_remove_rule_unary_rest_flattened_error(transport: str = "rest"): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3977,22 +7160,15 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.patch_unary( - compute.PatchRegionSecurityPolicyRequest(), + client.remove_rule_unary( + compute.RemoveRuleRegionSecurityPolicyRequest(), project="project_value", region="region_value", security_policy="security_policy_value", - security_policy_resource=compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ), ) -def test_patch_unary_rest_error(): +def test_remove_rule_unary_rest_error(): client = RegionSecurityPoliciesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4108,11 +7284,15 @@ def test_region_security_policies_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( + "add_rule", "delete", "get", + "get_rule", "insert", "list", "patch", + "patch_rule", + "remove_rule", ) for method in methods: with pytest.raises(NotImplementedError): @@ -4250,12 +7430,18 @@ def test_region_security_policies_client_transport_session_collision(transport_n credentials=creds2, transport=transport_name, ) + session1 = client1.transport.add_rule._session + session2 = client2.transport.add_rule._session + assert session1 != session2 session1 = client1.transport.delete._session session2 = client2.transport.delete._session assert session1 != session2 session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 @@ -4265,6 +7451,12 @@ def test_region_security_policies_client_transport_session_collision(transport_n session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 + session1 = client1.transport.patch_rule._session + session2 = client2.transport.patch_rule._session + assert session1 != session2 + session1 = client1.transport.remove_rule._session + session2 = client2.transport.remove_rule._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py index cc13e8533774..570e80e1584e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py index 512e7ff41b55..9d1aaa3b6999 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -668,6 +668,7 @@ def test_aggregated_list_rest_required_fields( 
"order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -732,6 +733,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py index 0443c6e0701b..c685cb429680 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py @@ -626,6 +626,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -690,6 +691,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -1848,6 +1850,299 @@ def test_get_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + compute.GetNatIpInfoRouterRequest, + dict, + ], +) +def test_get_nat_ip_info_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NatIpInfoResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.NatIpInfoResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_nat_ip_info(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NatIpInfoResponse) + + +def test_get_nat_ip_info_rest_required_fields( + request_type=compute.GetNatIpInfoRouterRequest, +): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_nat_ip_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["router"] = "router_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_nat_ip_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("nat_name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "router" in jsonified_request + assert jsonified_request["router"] == "router_value" + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NatIpInfoResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.NatIpInfoResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_nat_ip_info(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_nat_ip_info_rest_unset_required_fields(): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_nat_ip_info._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("natName",)) + & set( + ( + "project", + "region", + "router", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_nat_ip_info_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RoutersRestInterceptor, "post_get_nat_ip_info" + ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "pre_get_nat_ip_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNatIpInfoRouterRequest.pb( + compute.GetNatIpInfoRouterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NatIpInfoResponse.to_json( + compute.NatIpInfoResponse() + ) + + request = compute.GetNatIpInfoRouterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = compute.NatIpInfoResponse() + + client.get_nat_ip_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_nat_ip_info_rest_bad_request( + transport: str = "rest", request_type=compute.GetNatIpInfoRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_nat_ip_info(request) + + +def test_get_nat_ip_info_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NatIpInfoResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + router="router_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.NatIpInfoResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_nat_ip_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatIpInfo" + % client.transport._host, + args[1], + ) + + +def test_get_nat_ip_info_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_nat_ip_info( + compute.GetNatIpInfoRouterRequest(), + project="project_value", + region="region_value", + router="router_value", + ) + + +def test_get_nat_ip_info_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2620,10 +2915,18 @@ def test_insert_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -2647,6 +2950,7 @@ def test_insert_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -3134,10 +3438,18 @@ def test_insert_unary_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -3161,6 +3473,7 @@ def test_insert_unary_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -3982,10 +4295,18 @@ def test_patch_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -4009,6 +4330,7 @@ def test_patch_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -4507,10 +4829,18 @@ def test_patch_unary_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -4534,6 +4864,7 @@ def test_patch_unary_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -5010,10 +5341,18 @@ def test_preview_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + 
"source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -5037,6 +5376,7 @@ def test_preview_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -5490,10 +5830,18 @@ def test_update_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -5517,6 +5865,7 @@ def test_update_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -6015,10 +6364,18 @@ def test_update_unary_rest(request_type): "source_nat_active_ips_value1", "source_nat_active_ips_value2", ], + "source_nat_active_ranges": [ + "source_nat_active_ranges_value1", + "source_nat_active_ranges_value2", + ], "source_nat_drain_ips": [ "source_nat_drain_ips_value1", "source_nat_drain_ips_value2", ], + "source_nat_drain_ranges": [ + "source_nat_drain_ranges_value1", + "source_nat_drain_ranges_value2", + ], }, "description": "description_value", "match": "match_value", @@ -6042,6 +6399,7 @@ def test_update_unary_rest(request_type): "tcp_established_idle_timeout_sec": 3371, "tcp_time_wait_timeout_sec": 2665, "tcp_transitory_idle_timeout_sec": 3330, + "type_": "type__value", "udp_idle_timeout_sec": 2118, } ], @@ -6532,6 +6890,7 @@ def test_routers_base_transport(): "aggregated_list", "delete", "get", + "get_nat_ip_info", "get_nat_mapping_info", "get_router_status", "insert", @@ -6685,6 +7044,9 @@ def test_routers_client_transport_session_collision(transport_name): session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_nat_ip_info._session + session2 = client2.transport.get_nat_ip_info._session + assert session1 != session2 session1 = client1.transport.get_nat_mapping_info._session session2 = client2.transport.get_nat_mapping_info._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py index f125fb8ded3f..0a32539e7b5f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py @@ -617,6 +617,18 @@ def test_add_rule_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", 
"src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -1091,6 +1103,18 @@ def test_add_rule_unary_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -1594,6 +1618,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -1658,6 +1683,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -3123,6 +3149,15 @@ def test_insert_rest(request_type): "layer7_ddos_defense_config": { "enable": True, "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], } }, "advanced_options_config": { @@ -3131,6 +3166,10 @@ def test_insert_rest(request_type): }, "json_parsing": "json_parsing_value", "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], }, "creation_timestamp": "creation_timestamp_value", "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, @@ -3171,6 +3210,27 @@ def test_insert_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -3214,6 +3274,15 @@ def test_insert_rest(request_type): ], "self_link": "self_link_value", "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3635,6 +3704,15 @@ def test_insert_unary_rest(request_type): "layer7_ddos_defense_config": { "enable": True, "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], } }, "advanced_options_config": { @@ -3643,6 +3721,10 @@ def test_insert_unary_rest(request_type): }, "json_parsing": "json_parsing_value", "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], }, "creation_timestamp": "creation_timestamp_value", "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, @@ -3683,6 +3765,27 @@ def test_insert_unary_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -3726,6 +3829,15 @@ def test_insert_unary_rest(request_type): ], "self_link": "self_link_value", "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -4790,6 +4902,15 @@ def test_patch_rest(request_type): "layer7_ddos_defense_config": { "enable": True, "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], } }, "advanced_options_config": { @@ -4798,6 +4919,10 @@ def test_patch_rest(request_type): }, "json_parsing": "json_parsing_value", "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], }, "creation_timestamp": "creation_timestamp_value", "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, @@ -4838,6 +4963,27 @@ def test_patch_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -4881,6 +5027,15 @@ def test_patch_rest(request_type): ], "self_link": "self_link_value", "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5052,7 +5207,12 @@ def test_patch_rest_required_fields(request_type=compute.PatchSecurityPolicyRequ credentials=ga_credentials.AnonymousCredentials() ).patch._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5110,7 +5270,12 @@ def test_patch_rest_unset_required_fields(): unset_fields = transport.patch._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( "project", @@ -5299,6 +5464,15 @@ def test_patch_unary_rest(request_type): "layer7_ddos_defense_config": { "enable": True, "rule_visibility": "rule_visibility_value", + "threshold_configs": [ + { + "auto_deploy_confidence_threshold": 0.339, + "auto_deploy_expiration_sec": 2785, + "auto_deploy_impacted_baseline_threshold": 0.4121, + "auto_deploy_load_threshold": 0.2768, + "name": "name_value", + } + ], } }, "advanced_options_config": { @@ -5307,6 +5481,10 @@ def test_patch_unary_rest(request_type): }, "json_parsing": "json_parsing_value", "log_level": "log_level_value", + "user_ip_request_headers": [ + "user_ip_request_headers_value1", + "user_ip_request_headers_value2", + ], }, "creation_timestamp": "creation_timestamp_value", "ddos_protection_config": {"ddos_protection": "ddos_protection_value"}, @@ -5347,6 +5525,27 @@ def test_patch_unary_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": [ + "dest_ip_ranges_value1", + "dest_ip_ranges_value2", + ], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": [ + "src_region_codes_value1", + "src_region_codes_value2", + ], + "user_defined_fields": [ + { + "name": "name_value", + "values": ["values_value1", "values_value2"], + } + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -5390,6 +5589,15 @@ def test_patch_unary_rest(request_type): ], "self_link": "self_link_value", "type_": "type__value", + "user_defined_fields": [ + { + "base": "base_value", + "mask": "mask_value", + "name": "name_value", + "offset": 647, + "size": 443, + } + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5541,7 +5749,12 @@ def test_patch_unary_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).patch._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5599,7 +5812,12 @@ def test_patch_unary_rest_unset_required_fields(): unset_fields = transport.patch._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( "project", @@ -5807,6 +6025,18 @@ def test_patch_rule_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -6026,6 +6256,7 @@ def test_patch_rule_rest_required_fields( assert not set(unset_fields) - set( ( "priority", + "update_mask", "validate_only", ) ) @@ -6089,6 +6320,7 @@ def test_patch_rule_rest_unset_required_fields(): set( ( "priority", + "updateMask", "validateOnly", ) ) @@ -6291,6 +6523,18 @@ def test_patch_rule_unary_rest(request_type): }, "versioned_expr": "versioned_expr_value", }, + "network_match": { + "dest_ip_ranges": ["dest_ip_ranges_value1", "dest_ip_ranges_value2"], + "dest_ports": ["dest_ports_value1", "dest_ports_value2"], + "ip_protocols": ["ip_protocols_value1", "ip_protocols_value2"], + "src_asns": [861, 862], + "src_ip_ranges": ["src_ip_ranges_value1", "src_ip_ranges_value2"], + "src_ports": ["src_ports_value1", "src_ports_value2"], + "src_region_codes": ["src_region_codes_value1", "src_region_codes_value2"], + "user_defined_fields": [ + {"name": "name_value", "values": ["values_value1", "values_value2"]} + ], + }, "preconfigured_waf_config": { "exclusions": [ { @@ -6488,6 +6732,7 @@ def test_patch_rule_unary_rest_required_fields( assert not set(unset_fields) - set( ( "priority", + "update_mask", "validate_only", ) ) @@ -6551,6 +6796,7 @@ def test_patch_rule_unary_rest_unset_required_fields(): set( ( "priority", + "updateMask", "validateOnly", ) ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py index 4dcc1f5ad4f2..df4df88bab31 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py new file mode 100644 index 000000000000..1b66350d89f4 --- /dev/null +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py @@ -0,0 +1,2130 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.compute_v1.services.snapshot_settings_service import ( + SnapshotSettingsServiceClient, + transports, +) +from google.cloud.compute_v1.types import compute + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SnapshotSettingsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SnapshotSettingsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SnapshotSettingsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SnapshotSettingsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SnapshotSettingsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SnapshotSettingsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SnapshotSettingsServiceClient, "rest"), + ], +) +def test_snapshot_settings_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SnapshotSettingsServiceRestTransport, "rest"), + ], +) +def test_snapshot_settings_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SnapshotSettingsServiceClient, "rest"), + ], +) +def test_snapshot_settings_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_snapshot_settings_service_client_get_transport_class(): + transport = SnapshotSettingsServiceClient.get_transport_class() + available_transports = [ + transports.SnapshotSettingsServiceRestTransport, + ] + assert transport in available_transports + + transport = SnapshotSettingsServiceClient.get_transport_class("rest") + assert transport == transports.SnapshotSettingsServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + SnapshotSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SnapshotSettingsServiceClient), +) +def test_snapshot_settings_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SnapshotSettingsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SnapshotSettingsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + "rest", + "true", + ), + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SnapshotSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SnapshotSettingsServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_snapshot_settings_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [SnapshotSettingsServiceClient]) +@mock.patch.object( + SnapshotSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SnapshotSettingsServiceClient), +) +def test_snapshot_settings_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + "rest", + ), + ], +) +def test_snapshot_settings_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_snapshot_settings_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetSnapshotSettingRequest, + dict, + ], +) +def test_get_rest(request_type): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotSettings() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SnapshotSettings) + + +def test_get_rest_required_fields(request_type=compute.GetSnapshotSettingRequest): + transport_class = transports.SnapshotSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotSettingsServiceRestInterceptor(), + ) + client = SnapshotSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSnapshotSettingRequest.pb( + compute.GetSnapshotSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SnapshotSettings.to_json( + compute.SnapshotSettings() + ) + + request = compute.GetSnapshotSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SnapshotSettings() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSnapshotSettingRequest +): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetSnapshotSettingRequest(), + project="project_value", + ) + + +def test_get_rest_error(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchSnapshotSettingRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["snapshot_settings_resource"] = { + "storage_location": {"locations": {}, "policy": "policy_value"} + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSnapshotSettingRequest.meta.fields[ + "snapshot_settings_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "snapshot_settings_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["snapshot_settings_resource"][field]) + ): + del request_init["snapshot_settings_resource"][field][i][subfield] + else: + del request_init["snapshot_settings_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_required_fields(request_type=compute.PatchSnapshotSettingRequest): + transport_class = transports.SnapshotSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "snapshotSettingsResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotSettingsServiceRestInterceptor(), + ) + client = SnapshotSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSnapshotSettingRequest.pb( + compute.PatchSnapshotSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSnapshotSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + 
client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchSnapshotSettingRequest +): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot_settings_resource=compute.SnapshotSettings( + storage_location=compute.SnapshotSettingsStorageLocationSettings( + locations={ + "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( + name="name_value" + ) + } + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchSnapshotSettingRequest(), + project="project_value", + snapshot_settings_resource=compute.SnapshotSettings( + storage_location=compute.SnapshotSettingsStorageLocationSettings( + locations={ + "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( + name="name_value" + ) + } + ) + ), + ) + + +def test_patch_rest_error(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchSnapshotSettingRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["snapshot_settings_resource"] = { + "storage_location": {"locations": {}, "policy": "policy_value"} + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchSnapshotSettingRequest.meta.fields[ + "snapshot_settings_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "snapshot_settings_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, 
len(request_init["snapshot_settings_resource"][field]) + ): + del request_init["snapshot_settings_resource"][field][i][subfield] + else: + del request_init["snapshot_settings_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchSnapshotSettingRequest, +): + transport_class = transports.SnapshotSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "snapshotSettingsResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotSettingsServiceRestInterceptor(), + ) + client = SnapshotSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSnapshotSettingRequest.pb( + compute.PatchSnapshotSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSnapshotSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchSnapshotSettingRequest +): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot_settings_resource=compute.SnapshotSettings( + storage_location=compute.SnapshotSettingsStorageLocationSettings( + locations={ + "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( + name="name_value" + ) + } + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchSnapshotSettingRequest(), + project="project_value", + snapshot_settings_resource=compute.SnapshotSettings( + storage_location=compute.SnapshotSettingsStorageLocationSettings( + locations={ + "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( + name="name_value" + ) + } + ) + ), + ) + + +def test_patch_unary_rest_error(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotSettingsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SnapshotSettingsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SnapshotSettingsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotSettingsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SnapshotSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SnapshotSettingsServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SnapshotSettingsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SnapshotSettingsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_snapshot_settings_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SnapshotSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_snapshot_settings_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.snapshot_settings_service.transports.SnapshotSettingsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SnapshotSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get", + "patch", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_snapshot_settings_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.compute_v1.services.snapshot_settings_service.transports.SnapshotSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotSettingsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_snapshot_settings_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.compute_v1.services.snapshot_settings_service.transports.SnapshotSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotSettingsServiceTransport() + adc.assert_called_once() + + +def test_snapshot_settings_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SnapshotSettingsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_snapshot_settings_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SnapshotSettingsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_snapshot_settings_service_host_no_port(transport_name): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_snapshot_settings_service_host_with_port(transport_name): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_snapshot_settings_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SnapshotSettingsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SnapshotSettingsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SnapshotSettingsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SnapshotSettingsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SnapshotSettingsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SnapshotSettingsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SnapshotSettingsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotSettingsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SnapshotSettingsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SnapshotSettingsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotSettingsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = SnapshotSettingsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SnapshotSettingsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotSettingsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SnapshotSettingsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SnapshotSettingsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SnapshotSettingsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SnapshotSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SnapshotSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SnapshotSettingsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + SnapshotSettingsServiceClient, + transports.SnapshotSettingsServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py index d16d61b4c067..33649abcc42d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py @@ -1205,6 +1205,7 @@ def test_get_rest(request_type): self_link="self_link_value", snapshot_type="snapshot_type_value", source_disk="source_disk_value", + source_disk_for_recovery_checkpoint="source_disk_for_recovery_checkpoint_value", source_disk_id="source_disk_id_value", source_snapshot_schedule_policy="source_snapshot_schedule_policy_value", source_snapshot_schedule_policy_id="source_snapshot_schedule_policy_id_value", @@ -1246,6 +1247,10 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" assert response.snapshot_type == "snapshot_type_value" assert 
response.source_disk == "source_disk_value" + assert ( + response.source_disk_for_recovery_checkpoint + == "source_disk_for_recovery_checkpoint_value" + ) assert response.source_disk_id == "source_disk_id_value" assert ( response.source_snapshot_schedule_policy @@ -1809,6 +1814,7 @@ def test_insert_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": "type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -1829,6 +1835,7 @@ def test_insert_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", @@ -2231,6 +2238,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "disk_size_gb": 1261, "download_bytes": 1502, + "guest_os_features": [{"type_": "type__value"}], "id": 205, "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", @@ -2251,6 +2259,7 @@ def test_insert_unary_rest(request_type): "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", "source_disk_id": "source_disk_id_value", "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py index 7c29bd001a95..269a29c00c1a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -660,6 +660,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -724,6 +725,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py index a52395cca454..8f39812372d9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -639,6 +639,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -703,6 +704,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py index 77f89c7d3d24..c6048cb0c5be 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py index 6993d96a0361..7317ddd27aa1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py index 52a6d4a5216b..4088a33117fc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py index c968b30a9b04..a7b77a0d4413 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py @@ -660,6 +660,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -724,6 +725,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -1644,6 +1646,7 @@ def test_get_rest(request_type): name="name_value", nat_policy="nat_policy_value", network="network_value", + security_policy="security_policy_value", self_link="self_link_value", zone="zone_value", ) @@ -1669,6 +1672,7 @@ def test_get_rest(request_type): assert response.name == "name_value" assert response.nat_policy == "nat_policy_value" assert response.network == "network_value" + assert response.security_policy == "security_policy_value" assert response.self_link == "self_link_value" assert response.zone == "zone_value" @@ -1955,6 +1959,7 @@ def test_insert_rest(request_type): "name": "name_value", "nat_policy": "nat_policy_value", "network": "network_value", + "security_policy": "security_policy_value", "self_link": "self_link_value", "zone": "zone_value", } 
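The aggregated-list hunks above add a serviceProjectNumber query parameter alongside the existing filter / orderBy / pageToken / returnPartialSuccess set. A hedged sketch of how the new field might be used with the subnetworks client; "my-project" and the project number are placeholders, the integer type for service_project_number is an assumption based on the API's int64 format, and running this requires real credentials:

from google.cloud import compute_v1

client = compute_v1.SubnetworksClient()
request = compute_v1.AggregatedListSubnetworksRequest(
    project="my-project",
    return_partial_success=True,
    service_project_number=123456789,  # assumed to take the shared-VPC service project number
)
# The aggregated-list pager yields (scope, scoped_list) pairs, e.g. ("regions/us-central1", ...).
for scope, scoped_list in client.aggregated_list(request=request):
    for subnetwork in scoped_list.subnetworks:
        print(scope, subnetwork.name)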
@@ -2371,6 +2376,7 @@ def test_insert_unary_rest(request_type): "name": "name_value", "nat_policy": "nat_policy_value", "network": "network_value", + "security_policy": "security_policy_value", "self_link": "self_link_value", "zone": "zone_value", } @@ -3105,6 +3111,848 @@ def test_list_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyTargetInstanceRequest, + dict, + ], +) +def test_set_security_policy_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyTargetInstanceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del 
request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_security_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_set_security_policy_rest_required_fields( + request_type=compute.SetSecurityPolicyTargetInstanceRequest, +): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_instance"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["targetInstance"] = "target_instance_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "targetInstance" in jsonified_request + assert jsonified_request["targetInstance"] == "target_instance_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_security_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_security_policy_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "securityPolicyReferenceResource", + "targetInstance", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_set_security_policy" + ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, 
"pre_set_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyTargetInstanceRequest.pb( + compute.SetSecurityPolicyTargetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyTargetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyTargetInstanceRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy(request) + + +def test_set_security_policy_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + target_instance="target_instance_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_security_policy( + compute.SetSecurityPolicyTargetInstanceRequest(), + project="project_value", + zone="zone_value", + target_instance="target_instance_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_security_policy_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyTargetInstanceRequest, + dict, + ], +) +def test_set_security_policy_unary_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyTargetInstanceRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
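The get_message_fields helper repeated in these tests leans on proto-plus introspection: a generated message class exposes its schema through meta.fields, and message-typed fields carry a truthy .message attribute. A minimal standalone sketch of that technique, independent of the test scaffolding:

from google.cloud import compute_v1

fields = compute_v1.SetSecurityPolicyTargetInstanceRequest.meta.fields
for name, field in fields.items():
    # Composite (message-typed) fields have a truthy `message`; scalars do not.
    kind = "message" if getattr(field, "message", None) else "scalar"
    print(f"{name}: {kind}")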
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_security_policy_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_set_security_policy_unary_rest_required_fields( + request_type=compute.SetSecurityPolicyTargetInstanceRequest, +): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_instance"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["targetInstance"] = "target_instance_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "targetInstance" in jsonified_request + assert jsonified_request["targetInstance"] == "target_instance_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_security_policy_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_security_policy_unary_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "securityPolicyReferenceResource", + "targetInstance", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_unary_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_set_security_policy" + ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "pre_set_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyTargetInstanceRequest.pb( + compute.SetSecurityPolicyTargetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyTargetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_unary_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyTargetInstanceRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
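The interceptor tests patch pre_set_security_policy / post_set_security_policy on TargetInstancesRestInterceptor, which is also how user code hooks into the REST transport. A hedged sketch of a logging-only interceptor wired in the same way the tests construct the transport; the hook signatures follow the generated interceptor pattern these tests rely on:

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from google.cloud.compute_v1.services.target_instances import transports


class LoggingInterceptor(transports.TargetInstancesRestInterceptor):
    def pre_set_security_policy(self, request, metadata):
        print("calling SetSecurityPolicy")
        return request, metadata

    def post_set_security_policy(self, response):
        print("SetSecurityPolicy finished with status", response.status)
        return response


transport = transports.TargetInstancesRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),  # placeholder credentials
    interceptor=LoggingInterceptor(),
)
client = compute_v1.TargetInstancesClient(transport=transport)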
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy_unary(request) + + +def test_set_security_policy_unary_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + target_instance="target_instance_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_security_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_security_policy_unary( + compute.SetSecurityPolicyTargetInstanceRequest(), + project="project_value", + zone="zone_value", + target_instance="target_instance_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_security_policy_unary_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.TargetInstancesRestTransport( @@ -3220,6 +4068,7 @@ def test_target_instances_base_transport(): "get", "insert", "list", + "set_security_policy", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3372,6 +4221,9 @@ def test_target_instances_client_transport_session_collision(transport_name): session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.set_security_policy._session + session2 = client2.transport.set_security_policy._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py index c25ad24e59bf..d7d75d9e1533 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py @@ -2325,6 +2325,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -2389,6 +2390,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) @@ -3289,6 +3291,7 @@ def test_get_rest(request_type): kind="kind_value", name="name_value", region="region_value", + security_policy="security_policy_value", self_link="self_link_value", session_affinity="session_affinity_value", ) @@ -3316,6 +3319,7 @@ def test_get_rest(request_type): assert response.kind == "kind_value" assert response.name == "name_value" assert response.region == "region_value" + assert response.security_policy == "security_policy_value" assert response.self_link == "self_link_value" assert response.session_affinity == "session_affinity_value" @@ -3974,6 +3978,7 @@ def test_insert_rest(request_type): "kind": "kind_value", "name": "name_value", "region": "region_value", + "security_policy": "security_policy_value", "self_link": "self_link_value", "session_affinity": "session_affinity_value", } @@ -4386,6 +4391,7 @@ def test_insert_unary_rest(request_type): "kind": "kind_value", "name": "name_value", "region": "region_value", + "security_policy": "security_policy_value", "self_link": "self_link_value", "session_affinity": "session_affinity_value", } @@ -7630,6 +7636,832 @@ def test_set_backup_unary_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyTargetPoolRequest, + dict, + ], +) +def test_set_security_policy_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyTargetPoolRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_security_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_set_security_policy_rest_required_fields( + request_type=compute.SetSecurityPolicyTargetPoolRequest, +): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["targetPool"] = "target_pool_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + # Check 
that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == "target_pool_value" + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_security_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_security_policy_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "securityPolicyReferenceResource", + "targetPool", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_set_security_policy" + ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "pre_set_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyTargetPoolRequest.pb( + compute.SetSecurityPolicyTargetPoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyTargetPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy(request) + + +def test_set_security_policy_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + target_pool="target_pool_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
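The flattened_error tests around this point make the calling convention explicit: a request object and flattened fields cannot be mixed in one call. A hedged sketch of the request-object style for the new TargetPools.SetSecurityPolicy method; resource names are placeholders and the call needs real credentials:

from google.cloud import compute_v1

client = compute_v1.TargetPoolsClient()
request = compute_v1.SetSecurityPolicyTargetPoolRequest(
    project="my-project",
    region="us-central1",
    target_pool="my-target-pool",
    security_policy_reference_resource=compute_v1.SecurityPolicyReference(
        security_policy="my-security-policy"  # placeholder
    ),
)
# Pass only the request object; adding project=/region=/... here would raise ValueError.
operation = client.set_security_policy(request=request)
operation.result()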
+ with pytest.raises(ValueError): + client.set_security_policy( + compute.SetSecurityPolicyTargetPoolRequest(), + project="project_value", + region="region_value", + target_pool="target_pool_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_security_policy_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetSecurityPolicyTargetPoolRequest, + dict, + ], +) +def test_set_security_policy_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["security_policy_reference_resource"] = { + "security_policy": "security_policy_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetSecurityPolicyTargetPoolRequest.meta.fields[ + "security_policy_reference_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_policy_reference_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, 
len(request_init["security_policy_reference_resource"][field]) + ): + del request_init["security_policy_reference_resource"][field][i][ + subfield + ] + else: + del request_init["security_policy_reference_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_security_policy_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_set_security_policy_unary_rest_required_fields( + request_type=compute.SetSecurityPolicyTargetPoolRequest, +): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["targetPool"] = "target_pool_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == "target_pool_value" + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_security_policy_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_security_policy_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "securityPolicyReferenceResource", + "targetPool", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_set_security_policy" + ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "pre_set_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyTargetPoolRequest.pb( + compute.SetSecurityPolicyTargetPoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + 
request = compute.SetSecurityPolicyTargetPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_unary_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy_unary(request) + + +def test_set_security_policy_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + target_pool="target_pool_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_security_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_unary_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_security_policy_unary( + compute.SetSecurityPolicyTargetPoolRequest(), + project="project_value", + region="region_value", + target_pool="target_pool_value", + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), + ) + + +def test_set_security_policy_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.TargetPoolsRestTransport( @@ -7751,6 +8583,7 @@ def test_target_pools_base_transport(): "remove_health_check", "remove_instance", "set_backup", + "set_security_policy", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7921,6 +8754,9 @@ def test_target_pools_client_transport_session_collision(transport_name): session1 = client1.transport.set_backup._session session2 = client2.transport.set_backup._session assert session1 != session2 + session1 = client1.transport.set_security_policy._session + session2 = client2.transport.set_security_policy._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 7795c0acac56..52a0417abef7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index 79a0a7e48901..45a8258ddba9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -666,6 +666,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -730,6 +731,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index 07aea15f3adf..c0a399469af8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -628,6 +628,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -692,6 +693,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py index 3f11d1230a93..6621d60f750a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",)) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index 7f5f175e5ff1..ad24571c8ebf 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -637,6 +637,7 @@ def test_aggregated_list_rest_required_fields( "order_by", "page_token", "return_partial_success", + "service_project_number", ) ) jsonified_request.update(unset_fields) @@ -701,6 +702,7 @@ def test_aggregated_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "serviceProjectNumber", ) ) & set(("project",))
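Note: the tests above cover two additions from this API revision — a new TargetPools.SetSecurityPolicy method and a new serviceProjectNumber query parameter on the aggregatedList methods. The following is a minimal usage sketch (not part of the generated diff) of the new method, assuming the public surface matches what the tests call (TargetPoolsClient.set_security_policy_unary with the SetSecurityPolicyTargetPoolRequest and SecurityPolicyReference messages); all resource names below are placeholders.

from google.cloud import compute_v1

client = compute_v1.TargetPoolsClient()

request = compute_v1.SetSecurityPolicyTargetPoolRequest(
    project="my-project",          # placeholder project ID
    region="us-central1",          # placeholder region
    target_pool="my-target-pool",  # placeholder target pool name
    security_policy_reference_resource=compute_v1.SecurityPolicyReference(
        security_policy="my-security-policy",  # placeholder security policy name
    ),
)

# The unary variant returns a compute_v1.Operation describing the regional
# operation, mirroring the mocked responses asserted in the tests above.
operation = client.set_security_policy_unary(request=request)
print(operation.status)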