Workflows assets & system tests migration (AIP-47) (#24105)
* Workflows assets & system tests migration (AIP-47)

Co-authored-by: Wojciech Januszek <[email protected]>
wojsamjan and Wojciech Januszek authored Jun 3, 2022
1 parent b5218de commit e13b159
Showing 8 changed files with 272 additions and 70 deletions.
103 changes: 103 additions & 0 deletions airflow/providers/google/cloud/links/workflows.py
@@ -0,0 +1,103 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Workflows links."""
from typing import TYPE_CHECKING, Optional

from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink

if TYPE_CHECKING:
from airflow.utils.context import Context

WORKFLOWS_BASE_LINK = "https://console.cloud.google.com/workflows"
WORKFLOW_LINK = WORKFLOWS_BASE_LINK + "/workflow/{location_id}/{workflow_id}/executions?project={project_id}"
WORKFLOWS_LINK = WORKFLOWS_BASE_LINK + "?project={project_id}"
EXECUTION_LINK = (
    WORKFLOWS_BASE_LINK
    + "/workflow/{location_id}/{workflow_id}/execution/{execution_id}?project={project_id}"
)


class WorkflowsWorkflowDetailsLink(BaseGoogleLink):
    """Helper class for constructing Workflow details Link"""

    name = "Workflow details"
    key = "workflow_details"
    format_str = WORKFLOW_LINK

    @staticmethod
    def persist(
        context: "Context",
        task_instance: BaseOperator,
        location_id: str,
        workflow_id: str,
        project_id: Optional[str],
    ):
        task_instance.xcom_push(
            context,
            key=WorkflowsWorkflowDetailsLink.key,
            value={"location_id": location_id, "workflow_id": workflow_id, "project_id": project_id},
        )


class WorkflowsListOfWorkflowsLink(BaseGoogleLink):
    """Helper class for constructing list of Workflows Link"""

    name = "List of workflows"
    key = "list_of_workflows"
    format_str = WORKFLOWS_LINK

    @staticmethod
    def persist(
        context: "Context",
        task_instance: BaseOperator,
        project_id: Optional[str],
    ):
        task_instance.xcom_push(
            context,
            key=WorkflowsListOfWorkflowsLink.key,
            value={"project_id": project_id},
        )


class WorkflowsExecutionLink(BaseGoogleLink):
    """Helper class for constructing Workflows Execution Link"""

    name = "Workflow Execution"
    key = "workflow_execution"
    format_str = EXECUTION_LINK

    @staticmethod
    def persist(
        context: "Context",
        task_instance: BaseOperator,
        location_id: str,
        workflow_id: str,
        execution_id: str,
        project_id: Optional[str],
    ):
        task_instance.xcom_push(
            context,
            key=WorkflowsExecutionLink.key,
            value={
                "location_id": location_id,
                "workflow_id": workflow_id,
                "execution_id": execution_id,
                "project_id": project_id,
            },
        )
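
For orientation (not part of this commit): the dict each persist() pushes to XCom carries exactly the fields that slot into the link templates above, and the rendering is presumably done by BaseGoogleLink from format_str. A minimal sketch of how WORKFLOW_LINK expands, with all values being hypothetical placeholders:

# Illustrative only: expanding the WORKFLOW_LINK template defined above.
url = WORKFLOW_LINK.format(
    location_id="us-central1",   # hypothetical location
    workflow_id="my-workflow",   # hypothetical workflow id
    project_id="my-project",     # hypothetical GCP project
)
# -> https://console.cloud.google.com/workflows/workflow/us-central1/my-workflow/executions?project=my-project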
85 changes: 85 additions & 0 deletions airflow/providers/google/cloud/operators/workflows.py
@@ -31,6 +31,11 @@

from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
from airflow.providers.google.cloud.links.workflows import (
    WorkflowsExecutionLink,
    WorkflowsListOfWorkflowsLink,
    WorkflowsWorkflowDetailsLink,
)

if TYPE_CHECKING:
    from airflow.utils.context import Context
@@ -60,6 +65,7 @@ class WorkflowsCreateWorkflowOperator(BaseOperator):

    template_fields: Sequence[str] = ("location", "workflow", "workflow_id")
    template_fields_renderers = {"workflow": "json"}
    operator_extra_links = (WorkflowsWorkflowDetailsLink(),)

    def __init__(
        self,
@@ -132,6 +138,15 @@ def execute(self, context: 'Context'):
            timeout=self.timeout,
            metadata=self.metadata,
        )

        WorkflowsWorkflowDetailsLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            project_id=self.project_id or hook.project_id,
        )

        return Workflow.to_dict(workflow)


@@ -162,6 +177,7 @@ class WorkflowsUpdateWorkflowOperator(BaseOperator):

    template_fields: Sequence[str] = ("workflow_id", "update_mask")
    template_fields_renderers = {"update_mask": "json"}
    operator_extra_links = (WorkflowsWorkflowDetailsLink(),)

    def __init__(
        self,
@@ -209,6 +225,15 @@ def execute(self, context: 'Context'):
            metadata=self.metadata,
        )
        workflow = operation.result()

        WorkflowsWorkflowDetailsLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            project_id=self.project_id or hook.project_id,
        )

        return Workflow.to_dict(workflow)


@@ -296,6 +321,7 @@ class WorkflowsListWorkflowsOperator(BaseOperator):
    """

    template_fields: Sequence[str] = ("location", "order_by", "filter_")
    operator_extra_links = (WorkflowsListOfWorkflowsLink(),)

    def __init__(
        self,
@@ -335,6 +361,13 @@ def execute(self, context: 'Context'):
            timeout=self.timeout,
            metadata=self.metadata,
        )

        WorkflowsListOfWorkflowsLink.persist(
            context=context,
            task_instance=self,
            project_id=self.project_id or hook.project_id,
        )

        return [Workflow.to_dict(w) for w in workflows_iter]


@@ -357,6 +390,7 @@ class WorkflowsGetWorkflowOperator(BaseOperator):
    """

    template_fields: Sequence[str] = ("location", "workflow_id")
    operator_extra_links = (WorkflowsWorkflowDetailsLink(),)

    def __init__(
        self,
@@ -393,6 +427,15 @@ def execute(self, context: 'Context'):
            timeout=self.timeout,
            metadata=self.metadata,
        )

        WorkflowsWorkflowDetailsLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            project_id=self.project_id or hook.project_id,
        )

        return Workflow.to_dict(workflow)


@@ -418,6 +461,7 @@ class WorkflowsCreateExecutionOperator(BaseOperator):

    template_fields: Sequence[str] = ("location", "workflow_id", "execution")
    template_fields_renderers = {"execution": "json"}
    operator_extra_links = (WorkflowsExecutionLink(),)

    def __init__(
        self,
@@ -459,6 +503,16 @@ def execute(self, context: 'Context'):
        )
        execution_id = execution.name.split("/")[-1]
        self.xcom_push(context, key="execution_id", value=execution_id)

        WorkflowsExecutionLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            execution_id=execution_id,
            project_id=self.project_id or hook.project_id,
        )

        return Execution.to_dict(execution)
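
Because the execute() above pushes the execution id to XCom under the "execution_id" key, a downstream task in the same DAG can pick it up. A minimal sketch (the "create_execution" task_id and the callable wiring are hypothetical, not part of this diff):

# Hypothetical downstream callable, e.g. invoked via PythonOperator in the same DAG.
# It pulls the execution id that WorkflowsCreateExecutionOperator pushed above.
def report_execution(ti):
    execution_id = ti.xcom_pull(task_ids="create_execution", key="execution_id")
    print(f"Workflow execution started: {execution_id}")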


@@ -482,6 +536,7 @@ class WorkflowsCancelExecutionOperator(BaseOperator):
    """

    template_fields: Sequence[str] = ("location", "workflow_id", "execution_id")
    operator_extra_links = (WorkflowsExecutionLink(),)

    def __init__(
        self,
@@ -521,6 +576,16 @@ def execute(self, context: 'Context'):
            timeout=self.timeout,
            metadata=self.metadata,
        )

        WorkflowsExecutionLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            execution_id=self.execution_id,
            project_id=self.project_id or hook.project_id,
        )

        return Execution.to_dict(execution)


@@ -549,6 +614,7 @@ class WorkflowsListExecutionsOperator(BaseOperator):
    """

    template_fields: Sequence[str] = ("location", "workflow_id")
    operator_extra_links = (WorkflowsWorkflowDetailsLink(),)

    def __init__(
        self,
@@ -588,6 +654,14 @@ def execute(self, context: 'Context'):
            metadata=self.metadata,
        )

        WorkflowsWorkflowDetailsLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            project_id=self.project_id or hook.project_id,
        )

        return [Execution.to_dict(e) for e in execution_iter if e.start_time > self.start_date_filter]


@@ -611,6 +685,7 @@ class WorkflowsGetExecutionOperator(BaseOperator):
    """

    template_fields: Sequence[str] = ("location", "workflow_id", "execution_id")
    operator_extra_links = (WorkflowsExecutionLink(),)

    def __init__(
        self,
@@ -650,4 +725,14 @@ def execute(self, context: 'Context'):
            timeout=self.timeout,
            metadata=self.metadata,
        )

        WorkflowsExecutionLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            workflow_id=self.workflow_id,
            execution_id=self.execution_id,
            project_id=self.project_id or hook.project_id,
        )

        return Execution.to_dict(execution)
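
To show where the new links surface (purely illustrative, not part of this diff): any DAG task built from one of the operators above exposes the corresponding console button on its task instance page once this provider version is installed. A minimal sketch, with every id, region and the workflow body being hypothetical placeholders:

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.workflows import WorkflowsCreateWorkflowOperator

# Placeholder workflow definition for illustration only.
WORKFLOW = {"source_contents": 'main:\n  steps:\n    - done:\n        return: "Hello"'}

with DAG("example_workflows_links", start_date=datetime(2022, 6, 1), schedule_interval=None) as dag:
    create_workflow = WorkflowsCreateWorkflowOperator(
        task_id="create_workflow",
        location="us-central1",
        project_id="my-project",
        workflow_id="demo-workflow",
        workflow=WORKFLOW,
        # After this commit, the task instance view shows a "Workflow details"
        # button (WorkflowsWorkflowDetailsLink) pointing at the Google Cloud console.
    )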
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/sensors/workflows.py
@@ -56,7 +56,7 @@ def __init__(
         workflow_id: str,
         execution_id: str,
         location: str,
-        project_id: str,
+        project_id: Optional[str] = None,
         success_states: Optional[Set[Execution.State]] = None,
         failure_states: Optional[Set[Execution.State]] = None,
         retry: Union[Retry, _MethodDefault] = DEFAULT,
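
The only sensor change is that project_id becomes optional; when omitted it is expected to fall back to the default project configured on the Google Cloud connection. A usage sketch (all ids are placeholders, not from this commit):

# Hypothetical usage of the sensor after this change: project_id is left out.
from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor

wait_for_execution = WorkflowExecutionSensor(
    task_id="wait_for_execution",
    location="us-central1",
    workflow_id="demo-workflow",
    execution_id="some-execution-id",  # placeholder execution id
)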
3 changes: 3 additions & 0 deletions airflow/providers/google/provider.yaml
@@ -908,6 +908,9 @@ extra-links:
  - airflow.providers.google.cloud.links.vertex_ai.VertexAIBatchPredictionJobListLink
  - airflow.providers.google.cloud.links.vertex_ai.VertexAIEndpointLink
  - airflow.providers.google.cloud.links.vertex_ai.VertexAIEndpointListLink
  - airflow.providers.google.cloud.links.workflows.WorkflowsWorkflowDetailsLink
  - airflow.providers.google.cloud.links.workflows.WorkflowsListOfWorkflowsLink
  - airflow.providers.google.cloud.links.workflows.WorkflowsExecutionLink
  - airflow.providers.google.cloud.operators.cloud_composer.CloudComposerEnvironmentLink
  - airflow.providers.google.cloud.operators.cloud_composer.CloudComposerEnvironmentsLink
  - airflow.providers.google.cloud.links.dataflow.DataflowJobLink