Skip to content

Commit

Permalink
docs: add generated snippets (#342)
Browse files Browse the repository at this point in the history
* chore: use gapic-generator-python 0.63.2
docs: add generated snippets

PiperOrigin-RevId: 427792504

Source-Link: googleapis/googleapis@55b9e1e

Source-Link: googleapis/googleapis-gen@bf4e86b
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
gcf-owl-bot[bot] and gcf-owl-bot[bot] authored Feb 11, 2022
1 parent e421c05 commit 6e19333
Show file tree
Hide file tree
Showing 73 changed files with 7,264 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,31 @@ async def create_autoscaling_policy(
) -> autoscaling_policies.AutoscalingPolicy:
r"""Creates new autoscaling policy.
.. code-block::
from google.cloud import dataproc_v1
def sample_create_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
policy = dataproc_v1.AutoscalingPolicy()
policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578
policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789
policy.worker_config.max_instances = 1389
request = dataproc_v1.CreateAutoscalingPolicyRequest(
parent="parent_value",
policy=policy,
)
# Make the request
response = client.create_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]):
The request object. A request to create an autoscaling
Expand Down Expand Up @@ -323,6 +348,31 @@ async def update_autoscaling_policy(
Disabled check for update_mask, because all updates will be full
replacements.
.. code-block::
from google.cloud import dataproc_v1
def sample_update_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
policy = dataproc_v1.AutoscalingPolicy()
policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578
policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789
policy.worker_config.max_instances = 1389
request = dataproc_v1.UpdateAutoscalingPolicyRequest(
policy=policy,
)
# Make the request
response = client.update_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]):
The request object. A request to update an autoscaling
Expand Down Expand Up @@ -406,6 +456,25 @@ async def get_autoscaling_policy(
) -> autoscaling_policies.AutoscalingPolicy:
r"""Retrieves autoscaling policy.
.. code-block::
from google.cloud import dataproc_v1
def sample_get_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.GetAutoscalingPolicyRequest(
name="name_value",
)
# Make the request
response = client.get_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]):
The request object. A request to fetch an autoscaling
Expand Down Expand Up @@ -499,6 +568,26 @@ async def list_autoscaling_policies(
) -> pagers.ListAutoscalingPoliciesAsyncPager:
r"""Lists autoscaling policies in the project.
.. code-block::
from google.cloud import dataproc_v1
def sample_list_autoscaling_policies():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.ListAutoscalingPoliciesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_autoscaling_policies(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]):
The request object. A request to list autoscaling
Expand Down Expand Up @@ -603,6 +692,23 @@ async def delete_autoscaling_policy(
delete an autoscaling policy that is in use by one or
more clusters.
.. code-block::
from google.cloud import dataproc_v1
def sample_delete_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.DeleteAutoscalingPolicyRequest(
name="name_value",
)
# Make the request
client.delete_autoscaling_policy(request=request)
Args:
request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]):
The request object. A request to delete an autoscaling
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -419,6 +419,32 @@ def create_autoscaling_policy(
) -> autoscaling_policies.AutoscalingPolicy:
r"""Creates new autoscaling policy.
.. code-block::
from google.cloud import dataproc_v1
def sample_create_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
policy = dataproc_v1.AutoscalingPolicy()
policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578
policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789
policy.worker_config.max_instances = 1389
request = dataproc_v1.CreateAutoscalingPolicyRequest(
parent="parent_value",
policy=policy,
)
# Make the request
response = client.create_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]):
The request object. A request to create an autoscaling
Expand Down Expand Up @@ -517,6 +543,32 @@ def update_autoscaling_policy(
Disabled check for update_mask, because all updates will be full
replacements.
.. code-block::
from google.cloud import dataproc_v1
def sample_update_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
policy = dataproc_v1.AutoscalingPolicy()
policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578
policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789
policy.worker_config.max_instances = 1389
request = dataproc_v1.UpdateAutoscalingPolicyRequest(
policy=policy,
)
# Make the request
response = client.update_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]):
The request object. A request to update an autoscaling
Expand Down Expand Up @@ -592,6 +644,26 @@ def get_autoscaling_policy(
) -> autoscaling_policies.AutoscalingPolicy:
r"""Retrieves autoscaling policy.
.. code-block::
from google.cloud import dataproc_v1
def sample_get_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.GetAutoscalingPolicyRequest(
name="name_value",
)
# Make the request
response = client.get_autoscaling_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]):
The request object. A request to fetch an autoscaling
Expand Down Expand Up @@ -675,6 +747,27 @@ def list_autoscaling_policies(
) -> pagers.ListAutoscalingPoliciesPager:
r"""Lists autoscaling policies in the project.
.. code-block::
from google.cloud import dataproc_v1
def sample_list_autoscaling_policies():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.ListAutoscalingPoliciesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_autoscaling_policies(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]):
The request object. A request to list autoscaling
Expand Down Expand Up @@ -771,6 +864,24 @@ def delete_autoscaling_policy(
delete an autoscaling policy that is in use by one or
more clusters.
.. code-block::
from google.cloud import dataproc_v1
def sample_delete_autoscaling_policy():
# Create a client
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Initialize request argument(s)
request = dataproc_v1.DeleteAutoscalingPolicyRequest(
name="name_value",
)
# Make the request
client.delete_autoscaling_policy(request=request)
Args:
request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]):
The request object. A request to delete an autoscaling
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,34 @@ async def create_batch(
r"""Creates a batch workload that executes
asynchronously.
.. code-block::
from google.cloud import dataproc_v1
def sample_create_batch():
# Create a client
client = dataproc_v1.BatchControllerClient()
# Initialize request argument(s)
batch = dataproc_v1.Batch()
batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value"
request = dataproc_v1.CreateBatchRequest(
parent="parent_value",
batch=batch,
)
# Make the request
operation = client.create_batch(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.CreateBatchRequest, dict]):
The request object. A request to create a batch
Expand Down Expand Up @@ -324,6 +352,25 @@ async def get_batch(
) -> batches.Batch:
r"""Gets the batch workload resource representation.
.. code-block::
from google.cloud import dataproc_v1
def sample_get_batch():
# Create a client
client = dataproc_v1.BatchControllerClient()
# Initialize request argument(s)
request = dataproc_v1.GetBatchRequest(
name="name_value",
)
# Make the request
response = client.get_batch(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.GetBatchRequest, dict]):
The request object. A request to get the resource
Expand Down Expand Up @@ -395,6 +442,26 @@ async def list_batches(
) -> pagers.ListBatchesAsyncPager:
r"""Lists batch workloads.
.. code-block::
from google.cloud import dataproc_v1
def sample_list_batches():
# Create a client
client = dataproc_v1.BatchControllerClient()
# Initialize request argument(s)
request = dataproc_v1.ListBatchesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_batches(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.dataproc_v1.types.ListBatchesRequest, dict]):
The request object. A request to list batch workloads in
Expand Down Expand Up @@ -476,6 +543,23 @@ async def delete_batch(
terminal state, the delete fails and the response returns
``FAILED_PRECONDITION``.
.. code-block::
from google.cloud import dataproc_v1
def sample_delete_batch():
# Create a client
client = dataproc_v1.BatchControllerClient()
# Initialize request argument(s)
request = dataproc_v1.DeleteBatchRequest(
name="name_value",
)
# Make the request
client.delete_batch(request=request)
Args:
request (Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]):
The request object. A request to delete a batch
Expand Down
Loading

0 comments on commit 6e19333

Please sign in to comment.