[AIRFLOW-5088][AIP-24] Persisting serialized DAG in DB for webserver scalability (apache#5992)

Co-authored-by: Ash Berlin-Taylor <[email protected]>
Co-Authored-By: Zhou Fang <[email protected]>
3 people authored and eladkal committed Dec 2, 2019
1 parent eff76b2 commit 5e1f772
Showing 38 changed files with 2,130 additions and 85 deletions.
11 changes: 10 additions & 1 deletion airflow/api/common/experimental/delete_dag.py
@@ -21,9 +21,11 @@
from sqlalchemy import or_

from airflow import models
-from airflow.models import TaskFail, DagModel
+from airflow.models import TaskFail, DagModel, SerializedDagModel
from airflow.utils.db import provide_session
from airflow.exceptions import DagNotFound
from airflow.settings import STORE_SERIALIZED_DAGS
from airflow.utils.log.logging_mixin import LoggingMixin


@provide_session
@@ -36,10 +38,17 @@ def delete_dag(dag_id, keep_records_in_log=True, session=None):
    :param session: session used
    :return: count of deleted dags
    """
    logger = LoggingMixin()
    logger.log.info("Deleting DAG: %s", dag_id)
    dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).first()
    if dag is None:
        raise DagNotFound("Dag id {} not found".format(dag_id))

    # The scheduler removes DAGs without files from the serialized_dag table
    # every dag_dir_list_interval, but there may be a lag, so explicitly
    # remove the serialized DAG here as well.
    if STORE_SERIALIZED_DAGS and SerializedDagModel.has_dag(dag_id=dag_id, session=session):
        SerializedDagModel.remove_dag(dag_id=dag_id, session=session)

    count = 0

    # noinspection PyUnresolvedReferences,PyProtectedMember
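
For reference, SerializedDagModel.has_dag and SerializedDagModel.remove_dag amount to a lookup and a DELETE against the new serialized_dag table. A minimal sketch, not part of this diff (the real model added by this commit has more columns and logic):

import  # noqa: E999 -- sketch below is self-contained
from sqlalchemy import Column, String
from sqlalchemy.ext.declarative import declarative_base

from airflow.utils.db import provide_session

Base = declarative_base()


class SerializedDagModelSketch(Base):
    """Simplified stand-in for the SerializedDagModel added by this commit."""
    __tablename__ = 'serialized_dag'
    dag_id = Column(String(250), primary_key=True)

    @classmethod
    @provide_session
    def has_dag(cls, dag_id, session=None):
        # True if a serialized copy of the DAG is present in the table.
        return session.query(cls).filter(cls.dag_id == dag_id).count() > 0

    @classmethod
    @provide_session
    def remove_dag(cls, dag_id, session=None):
        # Delete the serialized copy; a no-op if no row exists.
        session.query(cls).filter(cls.dag_id == dag_id).delete()
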
7 changes: 7 additions & 0 deletions airflow/config_templates/default_airflow.cfg
@@ -204,6 +204,13 @@ dag_discovery_safe_mode = True
# The number of retries each task is going to have by default. Can be overridden at dag or task level.
default_task_retries = 0

# Whether to serialise DAGs and persist them in the DB.
# If set to True, the Webserver reads serialized DAGs from the DB instead of parsing DAG files.
# More details: https://airflow.apache.org/howto/enable-dag-serialization.html
store_serialized_dags = False

# A serialized DAG is written to the DB no more often than this interval (in seconds),
# to limit the database write rate.
min_serialized_dag_update_interval = 30
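
As a rough sketch of how these two options are consumed elsewhere in this commit (the exact lookups live in airflow/settings.py, which is not shown in this excerpt, so treat the names as assumptions):

from airflow.configuration import conf

# Defaults to False so existing deployments keep parsing DAG files.
STORE_SERIALIZED_DAGS = conf.getboolean(
    'core', 'store_serialized_dags', fallback=False)

# Minimum number of seconds between two DB writes of the same serialized DAG.
MIN_SERIALIZED_DAG_UPDATE_INTERVAL = conf.getint(
    'core', 'min_serialized_dag_update_interval', fallback=30)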

[cli]
# In what way should the cli access the API. The LocalClient will use the
44 changes: 44 additions & 0 deletions airflow/migrations/versions/b3b105409875_add_root_dag_id_to_dag.py
@@ -0,0 +1,44 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""add root_dag_id to DAG
Revision ID: b3b105409875
Revises: d38e04c12aa2
Create Date: 2019-09-28 23:20:01.744775
"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = 'b3b105409875'
down_revision = 'd38e04c12aa2'
branch_labels = None
depends_on = None


def upgrade():
    """Apply add root_dag_id to DAG"""
    op.add_column('dag', sa.Column('root_dag_id', sa.String(length=250), nullable=True))
    op.create_index('idx_root_dag_id', 'dag', ['root_dag_id'], unique=False)


def downgrade():
    """Unapply add root_dag_id to DAG"""
    op.drop_index('idx_root_dag_id', table_name='dag')
    op.drop_column('dag', 'root_dag_id')
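
The new root_dag_id column lets rows for subdags reference their top-level parent DAG directly. A hypothetical query using it (illustration only, not part of this diff):

from airflow.models import DagModel
from airflow.utils.db import provide_session


@provide_session
def find_subdags(root_dag_id, session=None):
    # All dag-table rows whose root ancestor is the given top-level DAG.
    return session.query(DagModel).filter(
        DagModel.root_dag_id == root_dag_id).all()
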
92 changes: 92 additions & 0 deletions airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py
@@ -0,0 +1,92 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""add serialized_dag table
Revision ID: d38e04c12aa2
Revises: 6e96a59344a4
Create Date: 2019-08-01 14:39:35.616417
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'd38e04c12aa2'
down_revision = '6e96a59344a4'
branch_labels = None
depends_on = None


def upgrade():
    """Upgrade version."""
    json_type = sa.JSON
    conn = op.get_bind()  # pylint: disable=no-member

    if conn.dialect.name != "postgresql":
        # MySQL 5.7+ / MariaDB 10.2.3+ have JSON support. Rather than checking
        # for versions, check for the function existing.
        try:
            conn.execute("SELECT JSON_VALID(1)").fetchone()
        except sa.exc.OperationalError:
            json_type = sa.Text

    op.create_table('serialized_dag',  # pylint: disable=no-member
                    sa.Column('dag_id', sa.String(length=250), nullable=False),
                    sa.Column('fileloc', sa.String(length=2000), nullable=False),
                    sa.Column('fileloc_hash', sa.Integer(), nullable=False),
                    sa.Column('data', json_type(), nullable=False),
                    sa.Column('last_updated', sa.DateTime(), nullable=False),
                    sa.PrimaryKeyConstraint('dag_id'))
    op.create_index(  # pylint: disable=no-member
        'idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])

    if conn.dialect.name == "mysql":
        conn.execute("SET time_zone = '+00:00'")
        cur = conn.execute("SELECT @@explicit_defaults_for_timestamp")
        res = cur.fetchall()
        if res[0][0] == 0:
            raise Exception(
                "Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql"
            )

        op.alter_column(  # pylint: disable=no-member
            table_name="serialized_dag",
            column_name="last_updated",
            type_=mysql.TIMESTAMP(fsp=6),
            nullable=False,
        )
    else:
        # sqlite and mssql datetime are fine as is, so no conversion is needed.
        if conn.dialect.name in ("sqlite", "mssql"):
            return

        # We try to be database agnostic, but not every db (e.g. sqlserver)
        # supports per-session time zones.
        if conn.dialect.name == "postgresql":
            conn.execute("set timezone=UTC")

        op.alter_column(  # pylint: disable=no-member
            table_name="serialized_dag",
            column_name="last_updated",
            type_=sa.TIMESTAMP(timezone=True),
        )


def downgrade():
    """Downgrade version."""
    op.drop_table('serialized_dag')  # pylint: disable=no-member
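
The fileloc_hash column exists because fileloc can be up to 2000 characters, which is too long to index portably, so an integer hash of the path is indexed instead. One plausible shape for that hash, shown for illustration (the exact function in the commit may differ):

import hashlib


def dag_fileloc_hash(full_filepath):
    # Map an arbitrary-length file path onto a non-negative value that fits
    # the signed 32-bit Integer column created by the migration above.
    digest = hashlib.sha1(full_filepath.encode('utf-8')).hexdigest()
    return int(digest, 16) % (2 ** 31)
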
1 change: 1 addition & 0 deletions airflow/models/__init__.py
@@ -29,6 +29,7 @@
from airflow.models.log import Log # noqa: F401
from airflow.models.pool import Pool # noqa: F401
from airflow.models.taskfail import TaskFail # noqa: F401
from airflow.models.serialized_dag import SerializedDagModel # noqa: F401
from airflow.models.skipmixin import SkipMixin # noqa: F401
from airflow.models.slamiss import SlaMiss # noqa: F401
from airflow.models.taskinstance import clear_task_instances, TaskInstance # noqa: F401
15 changes: 14 additions & 1 deletion airflow/models/baseoperator.py
@@ -27,7 +27,7 @@

from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
-from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type
+from typing import Any, Callable, Dict, FrozenSet, Iterable, List, Optional, Set, Type


from cached_property import cached_property
@@ -230,6 +230,8 @@ class derived from this one results in the creation of a task object,
    ui_color = '#fff'  # type: str
    ui_fgcolor = '#000'  # type: str

    pool = ""  # type: str

    # base list which includes all the attrs that don't need deep copy.
    _base_operator_shallow_copy_attrs = ('user_defined_macros',
                                         'user_defined_filters',
@@ -242,6 +244,9 @@ class derived from this one results in the creation of a task object,
    # Defines the operator level extra links
    operator_extra_links = ()  # type: Iterable[BaseOperatorLink]

    # Set at end of file
    _serialized_fields = frozenset()  # type: FrozenSet[str]

    _comps = {
        'task_id',
        'dag_id',
@@ -1076,6 +1081,14 @@ def get_extra_links(self, dttm, link_name):
        return None


# pylint: disable=protected-access
BaseOperator._serialized_fields = frozenset(
    set(vars(BaseOperator(task_id='test')).keys()) - {
        'inlets', 'outlets', '_upstream_task_ids', 'default_args'
    } | {'_task_type', 'subdag', 'ui_color', 'ui_fgcolor', 'template_fields'}
)
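
A hedged sketch of how a serializer might consume this whitelist; the real serialization code introduced by this PR does considerably more (JSON-encoding values, stripping defaults):

from airflow.models.baseoperator import BaseOperator


def serialize_operator(task):
    # Keep only whitelisted attributes; everything omitted here is
    # reconstructed from defaults when the DAG is deserialized.
    return {key: getattr(task, key, None)
            for key in BaseOperator._serialized_fields}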


class BaseOperatorLink:
"""
Abstract base class that defines how we get an operator link.