diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ef6ff5f1d0e4b..5ccc44da3cb9d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -200,7 +200,7 @@ repos:
         name: Run pydocstyle
         args:
           - --convention=pep257
-          - --add-ignore=D100,D102,D104,D105,D107,D200,D205,D400,D401
+          - --add-ignore=D100,D102,D104,D105,D107,D205,D400,D401
         exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^provider_packages|^kubernetes_tests|.*example_dags/.*
   - repo: local
     hooks:
diff --git a/airflow/api/client/__init__.py b/airflow/api/client/__init__.py
index 829af59f8962f..7431dfa4cfc34 100644
--- a/airflow/api/client/__init__.py
+++ b/airflow/api/client/__init__.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-API Client that allows interacting with Airflow API
-"""
+"""API Client that allows interacting with Airflow API"""
 from importlib import import_module
 from typing import Any
@@ -27,9 +25,7 @@
 
 
 def get_current_api_client() -> Client:
-    """
-    Return current API Client based on current Airflow configuration
-    """
+    """Return current API Client based on current Airflow configuration"""
     api_module = import_module(conf.get('cli', 'api_client'))  # type: Any
     auth_backend = api.load_auth()
     session = None
diff --git a/airflow/api/common/experimental/get_lineage.py b/airflow/api/common/experimental/get_lineage.py
index fd0102d195947..2d0e97dcdaf8d 100644
--- a/airflow/api/common/experimental/get_lineage.py
+++ b/airflow/api/common/experimental/get_lineage.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-Lineage apis
-"""
+"""Lineage apis"""
 import datetime
 from typing import Any, Dict, List
@@ -29,9 +27,7 @@
 @provide_session
 def get_lineage(dag_id: str, execution_date: datetime.datetime, session=None) -> Dict[str, Dict[str, Any]]:
-    """
-    Gets the lineage information for dag specified
-    """
+    """Gets the lineage information for dag specified"""
     dag = check_and_get_dag(dag_id)
     check_and_get_dagrun(dag, execution_date)
diff --git a/airflow/api_connexion/endpoints/config_endpoint.py b/airflow/api_connexion/endpoints/config_endpoint.py
index 34ed1ac97c5ad..4ba2ac360e932 100644
--- a/airflow/api_connexion/endpoints/config_endpoint.py
+++ b/airflow/api_connexion/endpoints/config_endpoint.py
@@ -64,9 +64,7 @@ def _config_to_json(config: Config) -> str:
 @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)])
 def get_config() -> Response:
-    """
-    Get current configuration.
- """ + """Get current configuration.""" serializer = { 'text/plain': _config_to_text, 'application/json': _config_to_json, diff --git a/airflow/api_connexion/endpoints/connection_endpoint.py b/airflow/api_connexion/endpoints/connection_endpoint.py index 5e7a4d8a7051c..173dbb521a72d 100644 --- a/airflow/api_connexion/endpoints/connection_endpoint.py +++ b/airflow/api_connexion/endpoints/connection_endpoint.py @@ -37,9 +37,7 @@ @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)]) @provide_session def delete_connection(connection_id, session): - """ - Delete a connection entry - """ + """Delete a connection entry""" connection = session.query(Connection).filter_by(conn_id=connection_id).one_or_none() if connection is None: raise NotFound( @@ -53,9 +51,7 @@ def delete_connection(connection_id, session): @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)]) @provide_session def get_connection(connection_id, session): - """ - Get a connection entry - """ + """Get a connection entry""" connection = session.query(Connection).filter(Connection.conn_id == connection_id).one_or_none() if connection is None: raise NotFound( @@ -69,9 +65,7 @@ def get_connection(connection_id, session): @format_parameters({'limit': check_limit}) @provide_session def get_connections(session, limit, offset=0): - """ - Get all connection entries - """ + """Get all connection entries""" total_entries = session.query(func.count(Connection.id)).scalar() query = session.query(Connection) connections = query.order_by(Connection.id).offset(offset).limit(limit).all() @@ -83,9 +77,7 @@ def get_connections(session, limit, offset=0): @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_CONNECTION)]) @provide_session def patch_connection(connection_id, session, update_mask=None): - """ - Update a connection entry - """ + """Update a connection entry""" try: data = connection_schema.load(request.json, partial=True) except ValidationError as err: @@ -119,9 +111,7 @@ def patch_connection(connection_id, session, update_mask=None): @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)]) @provide_session def post_connection(session): - """ - Create connection entry - """ + """Create connection entry""" body = request.json try: data = connection_schema.load(body) diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index 7ff69b8df2b23..51af288918ecf 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -35,9 +35,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)]) @provide_session def get_dag(dag_id, session): - """ - Get basic information about a DAG. - """ + """Get basic information about a DAG.""" dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one_or_none() if dag is None: @@ -48,9 +46,7 @@ def get_dag(dag_id, session): @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)]) def get_dag_details(dag_id): - """ - Get details of DAG. 
- """ + """Get details of DAG.""" dag: DAG = current_app.dag_bag.get_dag(dag_id) if not dag: raise NotFound("DAG not found", detail=f"The DAG with dag_id: {dag_id} was not found") @@ -60,9 +56,7 @@ def get_dag_details(dag_id): @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAGS)]) @format_parameters({'limit': check_limit}) def get_dags(limit, offset=0): - """ - Get all DAGs. - """ + """Get all DAGs.""" readable_dags = current_app.appbuilder.sm.get_readable_dags(g.user) dags = readable_dags.order_by(DagModel.dag_id).offset(offset).limit(limit).all() total_entries = readable_dags.count() @@ -73,9 +67,7 @@ def get_dags(limit, offset=0): @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAGS)]) @provide_session def patch_dag(session, dag_id, update_mask=None): - """ - Update the specific DAG - """ + """Update the specific DAG""" dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one_or_none() if not dag: raise NotFound(f"Dag with id: '{dag_id}' not found") diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index daf12d62c2834..b700c35c4971f 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -41,9 +41,7 @@ ) @provide_session def delete_dag_run(dag_id, dag_run_id, session): - """ - Delete a DAG Run - """ + """Delete a DAG Run""" if session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).delete() == 0: raise NotFound(detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found") return NoContent, 204 @@ -57,9 +55,7 @@ def delete_dag_run(dag_id, dag_run_id, session): ) @provide_session def get_dag_run(dag_id, dag_run_id, session): - """ - Get a DAG Run. - """ + """Get a DAG Run.""" dag_run = session.query(DagRun).filter(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id).one_or_none() if dag_run is None: raise NotFound( @@ -99,9 +95,7 @@ def get_dag_runs( offset=None, limit=None, ): - """ - Get all DAG Runs. - """ + """Get all DAG Runs.""" query = session.query(DagRun) # This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs. @@ -181,9 +175,7 @@ def _apply_date_filters_to_query( ) @provide_session def get_dag_runs_batch(session): - """ - Get list of DAG Runs - """ + """Get list of DAG Runs""" body = request.get_json() try: data = dagruns_batch_form_schema.load(body) @@ -222,9 +214,7 @@ def get_dag_runs_batch(session): ) @provide_session def post_dag_run(dag_id, session): - """ - Trigger a DAG. 
- """ + """Trigger a DAG.""" if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first(): raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found") diff --git a/airflow/api_connexion/endpoints/dag_source_endpoint.py b/airflow/api_connexion/endpoints/dag_source_endpoint.py index 04c6d6173388f..b023ae60a32a6 100644 --- a/airflow/api_connexion/endpoints/dag_source_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py @@ -31,9 +31,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE)]) def get_dag_source(file_token: str): - """ - Get source code using file token - """ + """Get source code using file token""" secret_key = current_app.config["SECRET_KEY"] auth_s = URLSafeSerializer(secret_key) try: diff --git a/airflow/api_connexion/endpoints/event_log_endpoint.py b/airflow/api_connexion/endpoints/event_log_endpoint.py index a3ad53f537546..0c6dcfa0f1489 100644 --- a/airflow/api_connexion/endpoints/event_log_endpoint.py +++ b/airflow/api_connexion/endpoints/event_log_endpoint.py @@ -34,9 +34,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_LOG)]) @provide_session def get_event_log(event_log_id, session): - """ - Get a log entry - """ + """Get a log entry""" event_log = session.query(Log).filter(Log.id == event_log_id).one_or_none() if event_log is None: raise NotFound("Event Log not found") @@ -47,9 +45,7 @@ def get_event_log(event_log_id, session): @format_parameters({'limit': check_limit}) @provide_session def get_event_logs(session, limit, offset=None): - """ - Get all log entries from event log - """ + """Get all log entries from event log""" total_entries = session.query(func.count(Log.id)).scalar() event_logs = session.query(Log).order_by(Log.id).offset(offset).limit(limit).all() return event_log_collection_schema.dump( diff --git a/airflow/api_connexion/endpoints/extra_link_endpoint.py b/airflow/api_connexion/endpoints/extra_link_endpoint.py index b2151a8cc277b..8702466bcb9ee 100644 --- a/airflow/api_connexion/endpoints/extra_link_endpoint.py +++ b/airflow/api_connexion/endpoints/extra_link_endpoint.py @@ -37,9 +37,7 @@ ) @provide_session def get_extra_links(dag_id: str, dag_run_id: str, task_id: str, session): - """ - Get extra links for task instance - """ + """Get extra links for task instance""" dagbag: DagBag = current_app.dag_bag dag: DAG = dagbag.get_dag(dag_id) if not dag: diff --git a/airflow/api_connexion/endpoints/health_endpoint.py b/airflow/api_connexion/endpoints/health_endpoint.py index f3f18aebaf1ee..ea0d62d57a594 100644 --- a/airflow/api_connexion/endpoints/health_endpoint.py +++ b/airflow/api_connexion/endpoints/health_endpoint.py @@ -22,9 +22,7 @@ def get_health(): - """ - Return the health of the airflow scheduler and metadatabase - """ + """Return the health of the airflow scheduler and metadatabase""" metadatabase_status = HEALTHY latest_scheduler_heartbeat = None scheduler_status = UNHEALTHY diff --git a/airflow/api_connexion/endpoints/import_error_endpoint.py b/airflow/api_connexion/endpoints/import_error_endpoint.py index 05b001fc4e03a..0623b3e63a73c 100644 --- a/airflow/api_connexion/endpoints/import_error_endpoint.py +++ b/airflow/api_connexion/endpoints/import_error_endpoint.py @@ -33,9 +33,7 @@ @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR)]) @provide_session def get_import_error(import_error_id, session): - """ - Get an import error - """ + """Get an import error""" error = 
 
     if error is None:
@@ -50,9 +48,7 @@ def get_import_error(import_error_id, session):
 @format_parameters({'limit': check_limit})
 @provide_session
 def get_import_errors(session, limit, offset=None):
-    """
-    Get all import errors
-    """
+    """Get all import errors"""
     total_entries = session.query(func.count(ImportError.id)).scalar()
     import_errors = session.query(ImportError).order_by(ImportError.id).offset(offset).limit(limit).all()
     return import_error_collection_schema.dump(
diff --git a/airflow/api_connexion/endpoints/log_endpoint.py b/airflow/api_connexion/endpoints/log_endpoint.py
index 4c32489d21edb..e3c8b72a75668 100644
--- a/airflow/api_connexion/endpoints/log_endpoint.py
+++ b/airflow/api_connexion/endpoints/log_endpoint.py
@@ -37,9 +37,7 @@
 )
 @provide_session
 def get_log(session, dag_id, dag_run_id, task_id, task_try_number, full_content=False, token=None):
-    """
-    Get logs for specific task instance
-    """
+    """Get logs for specific task instance"""
     key = current_app.config["SECRET_KEY"]
     if not token:
         metadata = {}
diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py
index 13ba32d4f94ea..1ed5cbf546acc 100644
--- a/airflow/api_connexion/endpoints/pool_endpoint.py
+++ b/airflow/api_connexion/endpoints/pool_endpoint.py
@@ -31,9 +31,7 @@
 @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)])
 @provide_session
 def delete_pool(pool_name: str, session):
-    """
-    Delete a pool
-    """
+    """Delete a pool"""
     if pool_name == "default_pool":
         raise BadRequest(detail="Default Pool can't be deleted")
     elif session.query(Pool).filter(Pool.pool == pool_name).delete() == 0:
@@ -45,9 +43,7 @@ def delete_pool(pool_name: str, session):
 @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)])
 @provide_session
 def get_pool(pool_name, session):
-    """
-    Get a pool
-    """
+    """Get a pool"""
     obj = session.query(Pool).filter(Pool.pool == pool_name).one_or_none()
     if obj is None:
         raise NotFound(detail=f"Pool with name:'{pool_name}' not found")
@@ -58,9 +54,7 @@ def get_pool(pool_name, session):
 @format_parameters({'limit': check_limit})
 @provide_session
 def get_pools(session, limit, offset=None):
-    """
-    Get all pools
-    """
+    """Get all pools"""
     total_entries = session.query(func.count(Pool.id)).scalar()
     pools = session.query(Pool).order_by(Pool.id).offset(offset).limit(limit).all()
     return pool_collection_schema.dump(PoolCollection(pools=pools, total_entries=total_entries))
@@ -69,9 +63,7 @@ def get_pools(session, limit, offset=None):
 @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_POOL)])
 @provide_session
 def patch_pool(pool_name, session, update_mask=None):
-    """
-    Update a pool
-    """
+    """Update a pool"""
     # Only slots can be modified in 'default_pool'
     try:
         if pool_name == Pool.DEFAULT_POOL_NAME and request.json["name"] != Pool.DEFAULT_POOL_NAME:
@@ -120,9 +112,7 @@ def patch_pool(pool_name, session, update_mask=None):
 @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL)])
 @provide_session
 def post_pool(session):
-    """
-    Create a pool
-    """
+    """Create a pool"""
     required_fields = ["name", "slots"]  # Pool would require both fields in the post request
     for field in required_fields:
         if field not in request.json.keys():
diff --git a/airflow/api_connexion/endpoints/task_endpoint.py b/airflow/api_connexion/endpoints/task_endpoint.py
index 36755dbbcd6c0..5e851ba25bd08 100644
--- a/airflow/api_connexion/endpoints/task_endpoint.py
+++ b/airflow/api_connexion/endpoints/task_endpoint.py
@@ -31,9 +31,7 @@
     ]
 )
 def get_task(dag_id, task_id):
-    """
-    Get simplified representation of a task.
-    """
+    """Get simplified representation of a task."""
     dag: DAG = current_app.dag_bag.get_dag(dag_id)
     if not dag:
         raise NotFound("DAG not found")
@@ -52,9 +50,7 @@ def get_task(dag_id, task_id):
     ]
 )
 def get_tasks(dag_id):
-    """
-    Get tasks for DAG
-    """
+    """Get tasks for DAG"""
     dag: DAG = current_app.dag_bag.get_dag(dag_id)
     if not dag:
         raise NotFound("DAG not found")
diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py
index 8ce096133fc50..9cb8c2a8c61be 100644
--- a/airflow/api_connexion/endpoints/task_instance_endpoint.py
+++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py
@@ -126,9 +126,7 @@ def get_task_instances(
     offset: Optional[int] = None,
     session=None,
 ):  # pylint: disable=too-many-arguments
-    """
-    Get list of task instances.
-    """
+    """Get list of task instances."""
     base_query = session.query(TI)
 
     if dag_id != "~":
@@ -179,9 +177,7 @@ def get_task_instances(
 )
 @provide_session
 def get_task_instances_batch(session=None):
-    """
-    Get list of task instances.
-    """
+    """Get list of task instances."""
     body = request.get_json()
     try:
         data = task_instance_batch_form.load(body)
@@ -239,9 +235,7 @@ def get_task_instances_batch(session=None):
 )
 @provide_session
 def post_clear_task_instances(dag_id: str, session=None):
-    """
-    Clear task instances.
-    """
+    """Clear task instances."""
     body = request.get_json()
     try:
         data = clear_task_instance_form.load(body)
diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py
index b7e1bab665492..ea703cf99e189 100644
--- a/airflow/api_connexion/endpoints/variable_endpoint.py
+++ b/airflow/api_connexion/endpoints/variable_endpoint.py
@@ -31,9 +31,7 @@
 @security.requires_access([(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_VARIABLE)])
 def delete_variable(variable_key: str) -> Response:
-    """
-    Delete variable
-    """
+    """Delete variable"""
     if Variable.delete(variable_key) == 0:
         raise NotFound("Variable not found")
     return Response(status=204)
@@ -41,9 +39,7 @@ def delete_variable(variable_key: str) -> Response:
 @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)])
 def get_variable(variable_key: str) -> Response:
-    """
-    Get a variables by key
-    """
+    """Get a variables by key"""
     try:
         var = Variable.get(variable_key)
     except KeyError:
@@ -55,9 +51,7 @@ def get_variable(variable_key: str) -> Response:
 @format_parameters({'limit': check_limit})
 @provide_session
 def get_variables(session, limit: Optional[int], offset: Optional[int] = None) -> Response:
-    """
-    Get all variable values
-    """
+    """Get all variable values"""
     total_entries = session.query(func.count(Variable.id)).scalar()
     query = session.query(Variable).order_by(Variable.id)
     if offset:
@@ -75,9 +69,7 @@ def get_variables(session, limit: Optional[int], offset: Optional[int] = None) -
 @security.requires_access([(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_VARIABLE)])
 def patch_variable(variable_key: str, update_mask: Optional[List[str]] = None) -> Response:
-    """
-    Update a variable by key
-    """
+    """Update a variable by key"""
     try:
         data = variable_schema.load(request.json)
     except ValidationError as err:
@@ -98,9 +90,7 @@ def patch_variable(variable_key: str, update_mask: Optional[List[str]] = None) -
 @security.requires_access([(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)])
 def post_variables() -> Response:
-    """
-    Create a variable
-    """
+    """Create a variable"""
     try:
         data = variable_schema.load(request.json)
diff --git a/airflow/api_connexion/endpoints/xcom_endpoint.py b/airflow/api_connexion/endpoints/xcom_endpoint.py
index b6608f505f092..190c193d6bd9d 100644
--- a/airflow/api_connexion/endpoints/xcom_endpoint.py
+++ b/airflow/api_connexion/endpoints/xcom_endpoint.py
@@ -53,9 +53,7 @@ def get_xcom_entries(
     limit: Optional[int],
     offset: Optional[int] = None,
 ) -> XComCollectionSchema:
-    """
-    Get all XCom values
-    """
+    """Get all XCom values"""
     query = session.query(XCom)
     if dag_id == '~':
         appbuilder = current_app.appbuilder
@@ -88,9 +86,7 @@ def get_xcom_entries(
 def get_xcom_entry(
     dag_id: str, task_id: str, dag_run_id: str, xcom_key: str, session: Session
 ) -> XComCollectionItemSchema:
-    """
-    Get an XCom entry
-    """
+    """Get an XCom entry"""
     query = session.query(XCom).filter(XCom.dag_id == dag_id, XCom.task_id == task_id, XCom.key == xcom_key)
     query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.execution_date == DR.execution_date))
     query = query.filter(DR.run_id == dag_run_id)
diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py
index 4d492033cb04a..7874d27fdff3a 100644
--- a/airflow/api_connexion/schemas/common_schema.py
+++ b/airflow/api_connexion/schemas/common_schema.py
@@ -148,9 +148,7 @@ class TimezoneField(fields.String):
 
 
 class ClassReferenceSchema(Schema):
-    """
-    Class reference schema.
-    """
+    """Class reference schema."""
 
     module_path = fields.Method("_get_module", required=True)
     class_name = fields.Method("_get_class_name", required=True)
diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py
index 7c4e49f8bd100..44e3224c812ed 100644
--- a/airflow/api_connexion/schemas/connection_schema.py
+++ b/airflow/api_connexion/schemas/connection_schema.py
@@ -24,9 +24,7 @@
 
 
 class ConnectionCollectionItemSchema(SQLAlchemySchema):
-    """
-    Schema for a connection item
-    """
+    """Schema for a connection item"""
 
     class Meta:
         """Meta"""
@@ -42,9 +40,7 @@ class Meta:
 
 
 class ConnectionSchema(ConnectionCollectionItemSchema):  # pylint: disable=too-many-ancestors
-    """
-    Connection schema
-    """
+    """Connection schema"""
 
     password = auto_field(load_only=True)
     extra = auto_field()
diff --git a/airflow/api_connexion/schemas/dag_run_schema.py b/airflow/api_connexion/schemas/dag_run_schema.py
index fcab35771af56..31862657ae965 100644
--- a/airflow/api_connexion/schemas/dag_run_schema.py
+++ b/airflow/api_connexion/schemas/dag_run_schema.py
@@ -43,9 +43,7 @@ def _deserialize(self, value, attr, data, **kwargs):
 
 
 class DAGRunSchema(SQLAlchemySchema):
-    """
-    Schema for DAGRun
-    """
+    """Schema for DAGRun"""
 
     class Meta:
         """Meta"""
@@ -64,9 +62,7 @@ class Meta:
 
     @pre_load
     def autogenerate(self, data, **kwargs):
-        """
-        Auto generate run_id and execution_date if they are not loaded
-        """
+        """Auto generate run_id and execution_date if they are not loaded"""
         if "execution_date" not in data.keys():
             data["execution_date"] = str(timezone.utcnow())
         if "dag_run_id" not in data.keys():
diff --git a/airflow/api_connexion/schemas/pool_schema.py b/airflow/api_connexion/schemas/pool_schema.py
index c1b92f3ba6b91..4a97ef5eb6cc5 100644
--- a/airflow/api_connexion/schemas/pool_schema.py
+++ b/airflow/api_connexion/schemas/pool_schema.py
@@ -40,30 +40,22 @@ class Meta:
 
     @staticmethod
     def get_occupied_slots(obj: Pool) -> int:
-        """
-        Returns the occupied slots of the pool.
-        """
+        """Returns the occupied slots of the pool."""
         return obj.occupied_slots()
 
     @staticmethod
     def get_running_slots(obj: Pool) -> int:
-        """
-        Returns the running slots of the pool.
-        """
+        """Returns the running slots of the pool."""
         return obj.running_slots()
 
     @staticmethod
     def get_queued_slots(obj: Pool) -> int:
-        """
-        Returns the queued slots of the pool.
-        """
+        """Returns the queued slots of the pool."""
         return obj.queued_slots()
 
     @staticmethod
     def get_open_slots(obj: Pool) -> float:
-        """
-        Returns the open slots of the pool.
-        """
+        """Returns the open slots of the pool."""
         return obj.open_slots()
diff --git a/airflow/api_connexion/schemas/xcom_schema.py b/airflow/api_connexion/schemas/xcom_schema.py
index 01b93b5caf13b..b3f3f0dd021a2 100644
--- a/airflow/api_connexion/schemas/xcom_schema.py
+++ b/airflow/api_connexion/schemas/xcom_schema.py
@@ -23,9 +23,7 @@
 
 
 class XComCollectionItemSchema(SQLAlchemySchema):
-    """
-    Schema for a xcom item
-    """
+    """Schema for a xcom item"""
 
     class Meta:
         """Meta"""
@@ -40,9 +38,7 @@ class Meta:
 
 
 class XComSchema(XComCollectionItemSchema):
-    """
-    XCom schema
-    """
+    """XCom schema"""
 
     value = auto_field()
diff --git a/airflow/api_connexion/security.py b/airflow/api_connexion/security.py
index 271652a3281e2..631244e9ea326 100644
--- a/airflow/api_connexion/security.py
+++ b/airflow/api_connexion/security.py
@@ -74,9 +74,7 @@ def check_authorization(
 
 
 def requires_access(permissions: Optional[Sequence[Tuple[str, str]]] = None) -> Callable[[T], T]:
-    """
-    Factory for decorator that checks current user's permissions against required permissions.
-    """
+    """Factory for decorator that checks current user's permissions against required permissions."""
     appbuilder = current_app.appbuilder
     appbuilder.sm.sync_resource_permissions(permissions)
diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py
index a3e3cfde43cfa..336f954cccfcb 100644
--- a/airflow/cli/cli_parser.py
+++ b/airflow/cli/cli_parser.py
@@ -1472,13 +1472,9 @@ def get_parser(dag_parser: bool = False) -> argparse.ArgumentParser:
 
 
 def _sort_args(args: Iterable[Arg]) -> Iterable[Arg]:
-    """
-    Sort subcommand optional args, keep positional args
-    """
+    """Sort subcommand optional args, keep positional args"""
     def get_long_option(arg: Arg):
-        """
-        Get long option from Arg.flags
-        """
+        """Get long option from Arg.flags"""
         return arg.flags[0] if len(arg.flags) == 1 else arg.flags[1]
     positional, optional = partition(lambda x: x.flags[0].startswith("-"), args)
     yield from positional
diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py
index 0e055ce67e2e1..ee8bfee44fce4 100644
--- a/airflow/cli/commands/dag_command.py
+++ b/airflow/cli/commands/dag_command.py
@@ -149,9 +149,7 @@ def dag_backfill(args, dag=None):
 @cli_utils.action_logging
 def dag_trigger(args):
-    """
-    Creates a dag run for the specified dag
-    """
+    """Creates a dag run for the specified dag"""
     api_client = get_current_api_client()
     try:
         message = api_client.trigger_dag(dag_id=args.dag_id,
@@ -165,9 +163,7 @@ def dag_trigger(args):
 @cli_utils.action_logging
 def dag_delete(args):
-    """
-    Deletes all DB records related to the specified dag
-    """
+    """Deletes all DB records related to the specified dag"""
     api_client = get_current_api_client()
     if args.yes or input(
             "This will drop all existing records related to the specified DAG. "
" diff --git a/airflow/cli/commands/info_command.py b/airflow/cli/commands/info_command.py index bcf9c142eef56..3fcad451464a0 100644 --- a/airflow/cli/commands/info_command.py +++ b/airflow/cli/commands/info_command.py @@ -436,9 +436,7 @@ def _send_report_to_fileio(info): def show_info(args): - """ - Show information related to Airflow, system and other. - """ + """Show information related to Airflow, system and other.""" # Enforce anonymization, when file_io upload is tuned on. anonymizer = PiiAnonymizer() if args.anonymize or args.file_io else NullAnonymizer() info = AirflowInfo(anonymizer) diff --git a/airflow/cli/commands/pool_command.py b/airflow/cli/commands/pool_command.py index a3851890fbd1d..bb3a07e0730fa 100644 --- a/airflow/cli/commands/pool_command.py +++ b/airflow/cli/commands/pool_command.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Pools sub-commands -""" +"""Pools sub-commands""" import json import os from json import JSONDecodeError diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index 57c7b82152d56..145d78b917b1d 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -100,9 +100,7 @@ def _run_task_by_executor(args, dag, ti): def _run_task_by_local_task_job(args, ti): - """ - Run LocalTaskJob, which monitors the raw task execution process - """ + """Run LocalTaskJob, which monitors the raw task execution process""" run_job = LocalTaskJob( task_instance=ti, mark_success=args.mark_success, diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py index 9875e76693981..3540388cac5b4 100644 --- a/airflow/cli/commands/webserver_command.py +++ b/airflow/cli/commands/webserver_command.py @@ -147,9 +147,7 @@ def _get_num_workers_running(self) -> int: return len(workers) def _wait_until_true(self, fn, timeout: int = 0) -> None: - """ - Sleeps until fn is true - """ + """Sleeps until fn is true""" start_time = time.time() while not fn(): if 0 < timeout <= time.time() - start_time: @@ -203,9 +201,7 @@ def _reload_gunicorn(self) -> None: ) def start(self) -> NoReturn: - """ - Starts monitoring the webserver. - """ + """Starts monitoring the webserver.""" try: # pylint: disable=too-many-nested-blocks self._wait_until_true( lambda: self.num_workers_expected == self._get_num_workers_running(), diff --git a/airflow/configuration.py b/airflow/configuration.py index 32386a2e0aeee..e88c9b16e8293 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -68,9 +68,7 @@ def expand_env_var(env_var): def run_command(command): - """ - Runs command and returns stdout - """ + """Runs command and returns stdout""" process = subprocess.Popen( shlex.split(command), stdout=subprocess.PIPE, diff --git a/airflow/contrib/hooks/aws_hook.py b/airflow/contrib/hooks/aws_hook.py index f6f02ffced3d3..55bd95139e70e 100644 --- a/airflow/contrib/hooks/aws_hook.py +++ b/airflow/contrib/hooks/aws_hook.py @@ -29,9 +29,7 @@ class AwsHook(AwsBaseHook): - """ - This class is deprecated. Please use `airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`. - """ + """This class is deprecated. 
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_api_base_hook.py b/airflow/contrib/hooks/gcp_api_base_hook.py
index f937fdc3f0a45..ca173f44955bb 100644
--- a/airflow/contrib/hooks/gcp_api_base_hook.py
+++ b/airflow/contrib/hooks/gcp_api_base_hook.py
@@ -28,7 +28,8 @@
 
 class GoogleCloudBaseHook(GoogleBaseHook):
     """
-    This class is deprecated. Please use `airflow.providers.google.common.hooks.base_google.GoogleBaseHook`.
+    This class is deprecated. Please use
+    `airflow.providers.google.common.hooks.base_google.GoogleBaseHook`.
     """
 
     def __init__(self, *args, **kwargs):
diff --git a/airflow/contrib/hooks/gcp_container_hook.py b/airflow/contrib/hooks/gcp_container_hook.py
index 43b9cb1c7c5bb..5d8d900a90d4d 100644
--- a/airflow/contrib/hooks/gcp_container_hook.py
+++ b/airflow/contrib/hooks/gcp_container_hook.py
@@ -28,9 +28,7 @@
 
 
 class GKEClusterHook(GKEHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.container.GKEHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.container.GKEHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_dataflow_hook.py b/airflow/contrib/hooks/gcp_dataflow_hook.py
index 8018da03dafdf..54080c513fafa 100644
--- a/airflow/contrib/hooks/gcp_dataflow_hook.py
+++ b/airflow/contrib/hooks/gcp_dataflow_hook.py
@@ -28,9 +28,7 @@
 
 
 class DataFlowHook(DataflowHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.dataflow.DataflowHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.dataflow.DataflowHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_function_hook.py b/airflow/contrib/hooks/gcp_function_hook.py
index 80f21921ebf57..d684d8d451658 100644
--- a/airflow/contrib/hooks/gcp_function_hook.py
+++ b/airflow/contrib/hooks/gcp_function_hook.py
@@ -29,7 +29,8 @@
 
 class GcfHook(CloudFunctionsHook):
     """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook`.
+    This class is deprecated. Please use
+    `airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook`.
     """
 
     def __init__(self, *args, **kwargs):
diff --git a/airflow/contrib/hooks/gcp_kms_hook.py b/airflow/contrib/hooks/gcp_kms_hook.py
index 7591b7793f37c..6bd57399b4b38 100644
--- a/airflow/contrib/hooks/gcp_kms_hook.py
+++ b/airflow/contrib/hooks/gcp_kms_hook.py
@@ -28,9 +28,7 @@
 
 
 class GoogleCloudKMSHook(CloudKMSHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.kms.CloudKMSHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.kms.CloudKMSHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_natural_language_hook.py b/airflow/contrib/hooks/gcp_natural_language_hook.py
index 3ba50be8397df..70f9405c01e2a 100644
--- a/airflow/contrib/hooks/gcp_natural_language_hook.py
+++ b/airflow/contrib/hooks/gcp_natural_language_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.natural_language`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.natural_language`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_spanner_hook.py b/airflow/contrib/hooks/gcp_spanner_hook.py
index 8ba35fc755df6..c5c28800f125c 100644
--- a/airflow/contrib/hooks/gcp_spanner_hook.py
+++ b/airflow/contrib/hooks/gcp_spanner_hook.py
@@ -29,9 +29,7 @@
 
 
 class CloudSpannerHook(SpannerHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.spanner.SpannerHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.spanner.SpannerHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(self.__doc__, DeprecationWarning, stacklevel=2)
diff --git a/airflow/contrib/hooks/gcp_speech_to_text_hook.py b/airflow/contrib/hooks/gcp_speech_to_text_hook.py
index ab9fefdf19d0c..dedece29b11c3 100644
--- a/airflow/contrib/hooks/gcp_speech_to_text_hook.py
+++ b/airflow/contrib/hooks/gcp_speech_to_text_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.speech_to_text`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.speech_to_text`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_sql_hook.py b/airflow/contrib/hooks/gcp_sql_hook.py
index 677942521b0fa..de2919f0abfd5 100644
--- a/airflow/contrib/hooks/gcp_sql_hook.py
+++ b/airflow/contrib/hooks/gcp_sql_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.cloud_sql`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.cloud_sql`."""
 
 import warnings
 
@@ -42,9 +40,7 @@ def __init__(self, *args, **kwargs):
 
 
 class CloudSqlHook(CloudSQLHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.sql.CloudSQLHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.sql.CloudSQLHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(self.__doc__, DeprecationWarning, stacklevel=2)
diff --git a/airflow/contrib/hooks/gcp_tasks_hook.py b/airflow/contrib/hooks/gcp_tasks_hook.py
index 5919244b1f902..e577de66e6130 100644
--- a/airflow/contrib/hooks/gcp_tasks_hook.py
+++ b/airflow/contrib/hooks/gcp_tasks_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.tasks`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.tasks`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_text_to_speech_hook.py b/airflow/contrib/hooks/gcp_text_to_speech_hook.py
index 0c9bf81783e3e..593502f9ab7a1 100644
--- a/airflow/contrib/hooks/gcp_text_to_speech_hook.py
+++ b/airflow/contrib/hooks/gcp_text_to_speech_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.text_to_speech`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.text_to_speech`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_transfer_hook.py b/airflow/contrib/hooks/gcp_transfer_hook.py
index 38fd83a0f0825..535c24fe2cf06 100644
--- a/airflow/contrib/hooks/gcp_transfer_hook.py
+++ b/airflow/contrib/hooks/gcp_transfer_hook.py
@@ -16,7 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.cloud_storage_transfer_service`.
+This module is deprecated. Please use
+`airflow.providers.google.cloud.hooks.cloud_storage_transfer_service`.
 """
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_translate_hook.py b/airflow/contrib/hooks/gcp_translate_hook.py
index d5f14a9c6707a..016f8efc6175c 100644
--- a/airflow/contrib/hooks/gcp_translate_hook.py
+++ b/airflow/contrib/hooks/gcp_translate_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.translate`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.translate`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_video_intelligence_hook.py b/airflow/contrib/hooks/gcp_video_intelligence_hook.py
index a90cd6f74cce3..76ad51ef68ceb 100644
--- a/airflow/contrib/hooks/gcp_video_intelligence_hook.py
+++ b/airflow/contrib/hooks/gcp_video_intelligence_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.video_intelligence`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.video_intelligence`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcp_vision_hook.py b/airflow/contrib/hooks/gcp_vision_hook.py
index 4092318fac0e4..05ac86a5e86f2 100644
--- a/airflow/contrib/hooks/gcp_vision_hook.py
+++ b/airflow/contrib/hooks/gcp_vision_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.vision`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.vision`."""
 
 import warnings
diff --git a/airflow/contrib/hooks/gcs_hook.py b/airflow/contrib/hooks/gcs_hook.py
index f7c43c50862f5..625e1aa3313c7 100644
--- a/airflow/contrib/hooks/gcs_hook.py
+++ b/airflow/contrib/hooks/gcs_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs`."""
 import warnings
 
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
@@ -29,9 +27,7 @@
 
 
 class GoogleCloudStorageHook(GCSHook):
-    """
-    This class is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs.GCSHook`.
-    """
+    """This class is deprecated. Please use `airflow.providers.google.cloud.hooks.gcs.GCSHook`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
diff --git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py
index bd74e65e4a2c7..1870c86e56e95 100644
--- a/airflow/contrib/hooks/sftp_hook.py
+++ b/airflow/contrib/hooks/sftp_hook.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.sftp.hooks.sftp`.
-"""
+"""This module is deprecated. Please use `airflow.providers.sftp.hooks.sftp`."""
 
 import warnings
diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py
index 04335c58a2870..f5f0e94646eb5 100644
--- a/airflow/contrib/operators/awsbatch_operator.py
+++ b/airflow/contrib/operators/awsbatch_operator.py
@@ -43,7 +43,8 @@
 # pylint: disable=too-many-ancestors
 class AWSBatchOperator(AwsBatchOperator):
     """
-    This class is deprecated. Please use `airflow.providers.amazon.aws.operators.batch.AwsBatchOperator`.
+    This class is deprecated. Please use
+    `airflow.providers.amazon.aws.operators.batch.AwsBatchOperator`.
     """
 
     def __init__(self, *args, **kwargs):
@@ -59,7 +60,8 @@ def __init__(self, *args, **kwargs):
 @runtime_checkable
 class BatchProtocol(AwsBatchProtocol, Protocol):
     """
-    This class is deprecated. Please use `airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol`.
+    This class is deprecated. Please use
+    `airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol`.
     """
 
     # A Protocol cannot be instantiated
diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py
index 002cbbec5da6e..56ea84fb7bf27 100644
--- a/airflow/contrib/operators/azure_container_instances_operator.py
+++ b/airflow/contrib/operators/azure_container_instances_operator.py
@@ -16,9 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.microsoft.azure.operators.azure_container_instances`.
+This module is deprecated. Please use
+`airflow.providers.microsoft.azure.operators.azure_container_instances`.
 """
-
 import warnings
 
 # pylint: disable=unused-import
diff --git a/airflow/contrib/operators/cassandra_to_gcs.py b/airflow/contrib/operators/cassandra_to_gcs.py
index da8e6be0ce656..9ac70f8f1da46 100644
--- a/airflow/contrib/operators/cassandra_to_gcs.py
+++ b/airflow/contrib/operators/cassandra_to_gcs.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/ecs_operator.py b/airflow/contrib/operators/ecs_operator.py
index 83066cf338ccd..37fc32d8c59d9 100644
--- a/airflow/contrib/operators/ecs_operator.py
+++ b/airflow/contrib/operators/ecs_operator.py
@@ -31,9 +31,7 @@
 
 @runtime_checkable
 class ECSProtocol(NewECSProtocol, Protocol):
-    """
-    This class is deprecated. Please use `airflow.providers.amazon.aws.operators.ecs.ECSProtocol`.
-    """
+    """This class is deprecated. Please use `airflow.providers.amazon.aws.operators.ecs.ECSProtocol`."""
 
     # A Protocol cannot be instantiated
diff --git a/airflow/contrib/operators/file_to_gcs.py b/airflow/contrib/operators/file_to_gcs.py
index 57f53f8c6f1ba..7b7b259d33f15 100644
--- a/airflow/contrib/operators/file_to_gcs.py
+++ b/airflow/contrib/operators/file_to_gcs.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_container_operator.py b/airflow/contrib/operators/gcp_container_operator.py
index 22838ac0fc982..886571d3d0dd9 100644
--- a/airflow/contrib/operators/gcp_container_operator.py
+++ b/airflow/contrib/operators/gcp_container_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.kubernetes_engine`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.kubernetes_engine`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_natural_language_operator.py b/airflow/contrib/operators/gcp_natural_language_operator.py
index fe4929a838f30..daac37eca33eb 100644
--- a/airflow/contrib/operators/gcp_natural_language_operator.py
+++ b/airflow/contrib/operators/gcp_natural_language_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.natural_language`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.natural_language`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_speech_to_text_operator.py b/airflow/contrib/operators/gcp_speech_to_text_operator.py
index 02e065b10c9c3..4d2c644096a00 100644
--- a/airflow/contrib/operators/gcp_speech_to_text_operator.py
+++ b/airflow/contrib/operators/gcp_speech_to_text_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.speech_to_text`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.speech_to_text`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_sql_operator.py b/airflow/contrib/operators/gcp_sql_operator.py
index dc330b2c28029..00a5b6edb399d 100644
--- a/airflow/contrib/operators/gcp_sql_operator.py
+++ b/airflow/contrib/operators/gcp_sql_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.cloud_sql`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.cloud_sql`."""
 
 import warnings
 
@@ -36,7 +34,8 @@
 
 class CloudSqlBaseOperator(CloudSQLBaseOperator):
     """
-    This class is deprecated. Please use `airflow.providers.google.cloud.operators.sql.CloudSQLBaseOperator`.
+    This class is deprecated. Please use
+    `airflow.providers.google.cloud.operators.sql.CloudSQLBaseOperator`.
     """
 
     def __init__(self, *args, **kwargs):
diff --git a/airflow/contrib/operators/gcp_tasks_operator.py b/airflow/contrib/operators/gcp_tasks_operator.py
index 1d68740a9c548..1e39bdd0f7a70 100644
--- a/airflow/contrib/operators/gcp_tasks_operator.py
+++ b/airflow/contrib/operators/gcp_tasks_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.tasks`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.tasks`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_text_to_speech_operator.py b/airflow/contrib/operators/gcp_text_to_speech_operator.py
index 49e01efd48253..9a43b7cee0a0f 100644
--- a/airflow/contrib/operators/gcp_text_to_speech_operator.py
+++ b/airflow/contrib/operators/gcp_text_to_speech_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.text_to_speech`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.text_to_speech`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_translate_operator.py b/airflow/contrib/operators/gcp_translate_operator.py
index e43b360b8a334..8b1c939d20c34 100644
--- a/airflow/contrib/operators/gcp_translate_operator.py
+++ b/airflow/contrib/operators/gcp_translate_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.translate`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.translate`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_video_intelligence_operator.py b/airflow/contrib/operators/gcp_video_intelligence_operator.py
index d56949f4bdf46..401c70d577c56 100644
--- a/airflow/contrib/operators/gcp_video_intelligence_operator.py
+++ b/airflow/contrib/operators/gcp_video_intelligence_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.video_intelligence`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.video_intelligence`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcp_vision_operator.py b/airflow/contrib/operators/gcp_vision_operator.py
index 7bd86aebe6a0d..99f89737882fc 100644
--- a/airflow/contrib/operators/gcp_vision_operator.py
+++ b/airflow/contrib/operators/gcp_vision_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.vision`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.vision`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_acl_operator.py b/airflow/contrib/operators/gcs_acl_operator.py
index 8b6f373509c76..4803c994d7dbe 100644
--- a/airflow/contrib/operators/gcs_acl_operator.py
+++ b/airflow/contrib/operators/gcs_acl_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_delete_operator.py b/airflow/contrib/operators/gcs_delete_operator.py
index 3abcabbf0000b..e02926f7a9890 100644
--- a/airflow/contrib/operators/gcs_delete_operator.py
+++ b/airflow/contrib/operators/gcs_delete_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_download_operator.py b/airflow/contrib/operators/gcs_download_operator.py
index bfa0a22b17e26..43f959d529f7c 100644
--- a/airflow/contrib/operators/gcs_download_operator.py
+++ b/airflow/contrib/operators/gcs_download_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_list_operator.py b/airflow/contrib/operators/gcs_list_operator.py
index d76cc9a334cfa..de17ce6d03daa 100644
--- a/airflow/contrib/operators/gcs_list_operator.py
+++ b/airflow/contrib/operators/gcs_list_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_operator.py b/airflow/contrib/operators/gcs_operator.py
index f0aa9f47246c4..bef41c5fc4dab 100644
--- a/airflow/contrib/operators/gcs_operator.py
+++ b/airflow/contrib/operators/gcs_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py
index 37b5a5ffc56e8..27af7b7953a74 100644
--- a/airflow/contrib/operators/gcs_to_bq.py
+++ b/airflow/contrib/operators/gcs_to_bq.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_to_gcs.py b/airflow/contrib/operators/gcs_to_gcs.py
index cd4b12e5caa47..4737fa0bf06f3 100644
--- a/airflow/contrib/operators/gcs_to_gcs.py
+++ b/airflow/contrib/operators/gcs_to_gcs.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`.
-"""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`."""
 
 import warnings
diff --git a/airflow/contrib/operators/gcs_to_s3.py b/airflow/contrib/operators/gcs_to_s3.py
index 80a483fcfadab..dd358ee44cc87 100644
--- a/airflow/contrib/operators/gcs_to_s3.py
+++ b/airflow/contrib/operators/gcs_to_s3.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.
-"""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`."""
 
 import warnings
 
@@ -31,7 +29,8 @@
 
 class GoogleCloudStorageToS3Operator(GCSToS3Operator):
     """
-    This class is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`.
+    This class is deprecated. Please use
+    `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`.
     """
 
     def __init__(self, *args, **kwargs):
diff --git a/airflow/contrib/operators/sftp_operator.py b/airflow/contrib/operators/sftp_operator.py
index 1a3fbfd3ad0c1..7bf19b2bb2afa 100644
--- a/airflow/contrib/operators/sftp_operator.py
+++ b/airflow/contrib/operators/sftp_operator.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.sftp.operators.sftp`.
-"""
+"""This module is deprecated. Please use `airflow.providers.sftp.operators.sftp`."""
 
 import warnings
diff --git a/airflow/contrib/sensors/sftp_sensor.py b/airflow/contrib/sensors/sftp_sensor.py
index 8c82608dd1840..24108f4011050 100644
--- a/airflow/contrib/sensors/sftp_sensor.py
+++ b/airflow/contrib/sensors/sftp_sensor.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.sftp.sensors.sftp`.
-"""
+"""This module is deprecated. Please use `airflow.providers.sftp.sensors.sftp`."""
 
 import warnings
diff --git a/airflow/contrib/utils/gcp_field_sanitizer.py b/airflow/contrib/utils/gcp_field_sanitizer.py
index 4861e6af2b3c5..1e39bb287bf42 100644
--- a/airflow/contrib/utils/gcp_field_sanitizer.py
+++ b/airflow/contrib/utils/gcp_field_sanitizer.py
@@ -15,9 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated. Please use `airflow.providers.google.cloud.utils.field_sanitizer`.
-""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.utils.field_sanitizer`""" import warnings diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py index f1ec39652ed41..08d677c2c17e0 100644 --- a/airflow/contrib/utils/gcp_field_validator.py +++ b/airflow/contrib/utils/gcp_field_validator.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.google.cloud.utils.field_validator`. -""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.utils.field_validator`.""" import warnings diff --git a/airflow/contrib/utils/log/__init__.py b/airflow/contrib/utils/log/__init__.py index cfdcad9153f53..aecb6b83be87f 100644 --- a/airflow/contrib/utils/log/__init__.py +++ b/airflow/contrib/utils/log/__init__.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This package is deprecated. Please use `airflow.utils.log`. -""" +"""This package is deprecated. Please use `airflow.utils.log`.""" import warnings diff --git a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py index 620f9afe1926c..a6ebd66483c85 100644 --- a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py +++ b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.utils.log.task_handler_with_custom_formatter`. -""" +"""This module is deprecated. Please use `airflow.utils.log.task_handler_with_custom_formatter`.""" import warnings diff --git a/airflow/contrib/utils/weekday.py b/airflow/contrib/utils/weekday.py index ebcd09414fbb2..85e85cceed5ea 100644 --- a/airflow/contrib/utils/weekday.py +++ b/airflow/contrib/utils/weekday.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.utils.weekday`. -""" +"""This module is deprecated. Please use `airflow.utils.weekday`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/dag/base_dag.py b/airflow/dag/base_dag.py index 33842deb7f300..19f76bebdb431 100644 --- a/airflow/dag/base_dag.py +++ b/airflow/dag/base_dag.py @@ -22,9 +22,7 @@ class BaseDag(metaclass=ABCMeta): - """ - Base DAG object that both the SimpleDag and DAG inherit. - """ + """Base DAG object that both the SimpleDag and DAG inherit.""" @property @abstractmethod @@ -72,9 +70,7 @@ def pickle_id(self): class BaseDagBag: - """ - Base object that both the SimpleDagBag and DagBag inherit. - """ + """Base object that both the SimpleDagBag and DagBag inherit.""" @property @abstractmethod diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py index 6467025ed768d..8477019de614d 100644 --- a/airflow/executors/base_executor.py +++ b/airflow/executors/base_executor.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-""" -Base executor - this is the base class for all the implemented executors. -""" +"""Base executor - this is the base class for all the implemented executors.""" import sys from collections import OrderedDict from typing import Any, Dict, List, Optional, Set, Tuple @@ -68,9 +66,7 @@ def __init__(self, parallelism: int = PARALLELISM): self.event_buffer: Dict[TaskInstanceKey, EventBufferValueType] = {} def start(self): # pragma: no cover - """ - Executors may need to get things started. - """ + """Executors may need to get things started.""" def queue_command(self, task_instance: TaskInstance, @@ -135,9 +131,7 @@ def sync(self) -> None: """ def heartbeat(self) -> None: - """ - Heartbeat sent to trigger new jobs. - """ + """Heartbeat sent to trigger new jobs.""" if not self.parallelism: open_slots = len(self.queued_tasks) else: @@ -265,9 +259,7 @@ def end(self) -> None: # pragma: no cover raise NotImplementedError() def terminate(self): - """ - This method is called when the daemon receives a SIGTERM - """ + """This method is called when the daemon receives a SIGTERM""" raise NotImplementedError() def try_adopt_task_instances(self, tis: List[TaskInstance]) -> List[TaskInstance]: @@ -286,9 +278,7 @@ def try_adopt_task_instances(self, tis: List[TaskInstance]) -> List[TaskInstance @property def slots_available(self): - """ - Number of new tasks this executor instance can accept - """ + """Number of new tasks this executor instance can accept""" if self.parallelism: return self.parallelism - len(self.running) - len(self.queued_tasks) else: diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 148d6bda4df82..12cf60ccd1b89 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -479,9 +479,7 @@ def __init__(self, sync_parralelism=None): self._sync_parallelism = sync_parralelism def get_many(self, async_results) -> Mapping[str, EventBufferValueType]: - """ - Gets status for many Celery tasks using the best method available. 
- """ + """Gets status for many Celery tasks using the best method available.""" if isinstance(app.backend, BaseKeyValueStoreBackend): result = self._get_many_from_kv_backend(async_results) return result diff --git a/airflow/executors/celery_kubernetes_executor.py b/airflow/executors/celery_kubernetes_executor.py index ef82c2060585f..0c400f0910079 100644 --- a/airflow/executors/celery_kubernetes_executor.py +++ b/airflow/executors/celery_kubernetes_executor.py @@ -42,9 +42,7 @@ def __init__(self, celery_executor, kubernetes_executor): @property def queued_tasks(self) -> Dict[TaskInstanceKey, QueuedTaskInstanceType]: - """ - Return queued tasks from celery and kubernetes executor - """ + """Return queued tasks from celery and kubernetes executor""" queued_tasks = self.celery_executor.queued_tasks.copy() queued_tasks.update(self.kubernetes_executor.queued_tasks) @@ -52,9 +50,7 @@ def queued_tasks(self) -> Dict[TaskInstanceKey, QueuedTaskInstanceType]: @property def running(self) -> Set[TaskInstanceKey]: - """ - Return running tasks from celery and kubernetes executor - """ + """Return running tasks from celery and kubernetes executor""" return self.celery_executor.running.union(self.kubernetes_executor.running) def start(self) -> None: @@ -115,9 +111,7 @@ def has_task(self, task_instance: TaskInstance) -> bool: or self.kubernetes_executor.has_task(task_instance) def heartbeat(self) -> None: - """ - Heartbeat sent to trigger new jobs in celery and kubernetes executor - """ + """Heartbeat sent to trigger new jobs in celery and kubernetes executor""" self.celery_executor.heartbeat() self.kubernetes_executor.heartbeat() @@ -156,16 +150,12 @@ def try_adopt_task_instances(self, tis: List[TaskInstance]) -> List[TaskInstance return abandoned_tis def end(self) -> None: - """ - End celery and kubernetes executor - """ + """End celery and kubernetes executor""" self.celery_executor.end() self.kubernetes_executor.end() def terminate(self) -> None: - """ - Terminate celery and kubernetes executor - """ + """Terminate celery and kubernetes executor""" self.celery_executor.terminate() self.kubernetes_executor.terminate() diff --git a/airflow/executors/dask_executor.py b/airflow/executors/dask_executor.py index 0b916f10fd353..e2cb64fda9961 100644 --- a/airflow/executors/dask_executor.py +++ b/airflow/executors/dask_executor.py @@ -35,9 +35,7 @@ class DaskExecutor(BaseExecutor): - """ - DaskExecutor submits tasks to a Dask Distributed cluster. - """ + """DaskExecutor submits tasks to a Dask Distributed cluster.""" def __init__(self, cluster_address=None): super().__init__(parallelism=0) diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index 3b21e3d80de34..391b7c701e53a 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -50,9 +50,7 @@ def __init__(self): self.fail_fast = conf.getboolean("debug", "fail_fast") def execute_async(self, *args, **kwargs) -> None: # pylint: disable=signature-differs - """ - The method is replaced by custom trigger_task implementation. - """ + """The method is replaced by custom trigger_task implementation.""" def sync(self) -> None: task_succeeded = True @@ -101,9 +99,7 @@ def queue_task_instance( pool: Optional[str] = None, cfg_path: Optional[str] = None, ) -> None: - """ - Queues task instance with empty command because we do not need it. 
- """ + """Queues task instance with empty command because we do not need it.""" self.queue_command( task_instance, [str(task_instance)], # Just for better logging, it's not used anywhere diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index d5a8a51ff8d69..6039a7adcf598 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -27,9 +27,7 @@ class ExecutorLoader: - """ - Keeps constants for all the currently available executors. - """ + """Keeps constants for all the currently available executors.""" LOCAL_EXECUTOR = "LocalExecutor" SEQUENTIAL_EXECUTOR = "SequentialExecutor" @@ -109,9 +107,7 @@ def load_executor(cls, executor_name: str) -> BaseExecutor: @classmethod def __load_celery_kubernetes_executor(cls) -> BaseExecutor: - """ - :return: an instance of CeleryKubernetesExecutor - """ + """:return: an instance of CeleryKubernetesExecutor""" celery_executor = import_string(cls.executors[cls.CELERY_EXECUTOR])() kubernetes_executor = import_string(cls.executors[cls.KUBERNETES_EXECUTOR])() diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index c3c8358dbda3d..1b3072e26004a 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -126,9 +126,7 @@ def _execute_work_in_fork(self, command: CommandType) -> str: @abstractmethod def do_work(self): - """ - Called in the subprocess and should then execute tasks - """ + """Called in the subprocess and should then execute tasks""" raise NotImplementedError() @@ -239,9 +237,7 @@ def execute_async(self, # pylint: enable=unused-argument # pragma: no cover def sync(self) -> None: - """ - Sync will get called periodically by the heartbeat method. - """ + """Sync will get called periodically by the heartbeat method.""" if not self.executor.result_queue: raise AirflowException("Executor should be started first") while not self.executor.result_queue.empty(): @@ -307,9 +303,7 @@ def execute_async( self.queue.put((key, command)) def sync(self): - """ - Sync will get called periodically by the heartbeat method. - """ + """Sync will get called periodically by the heartbeat method.""" while True: try: results = self.executor.result_queue.get_nowait() @@ -354,9 +348,7 @@ def execute_async(self, key: TaskInstanceKey, self.impl.execute_async(key=key, command=command, queue=queue, executor_config=executor_config) def sync(self) -> None: - """ - Sync will get called periodically by the heartbeat method. - """ + """Sync will get called periodically by the heartbeat method.""" if not self.impl: raise AirflowException(NOT_STARTED_MESSAGE) self.impl.sync() diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py index 41b660fc951ed..3a803a15969ac 100644 --- a/airflow/hooks/dbapi_hook.py +++ b/airflow/hooks/dbapi_hook.py @@ -27,9 +27,7 @@ class ConnectorProtocol(Protocol): - """ - A protocol where you can connect to a database. - """ + """A protocol where you can connect to a database.""" def connect(self, host: str, port: int, username: str, schema: str) -> Any: """ @@ -44,9 +42,7 @@ def connect(self, host: str, port: int, username: str, schema: str) -> Any: class DbApiHook(BaseHook): - """ - Abstract base class for sql hooks. - """ + """Abstract base class for sql hooks.""" # Override to provide the connection name. 
     conn_name_attr = None # type: str
@@ -69,8 +65,7 @@ def __init__(self, *args, **kwargs):
             setattr(self, self.conn_name_attr, kwargs[self.conn_name_attr])

     def get_conn(self):
-        """Returns a connection object
-        """
+        """Returns a connection object"""
         db = self.get_connection(getattr(self, self.conn_name_attr))
         return self.connector.connect(
             host=db.host,
@@ -200,9 +195,7 @@ def run(self, sql, autocommit=False, parameters=None):
             conn.commit()

     def set_autocommit(self, conn, autocommit):
-        """
-        Sets the autocommit flag on the connection
-        """
+        """Sets the autocommit flag on the connection"""
         if not self.supports_autocommit and autocommit:
             self.log.warning(
                 "%s connection doesn't support autocommit but autocommit activated.",
@@ -225,9 +218,7 @@ def get_autocommit(self, conn):
         return getattr(conn, 'autocommit', False) and self.supports_autocommit

     def get_cursor(self):
-        """
-        Returns a cursor
-        """
+        """Returns a cursor"""
         return self.get_conn().cursor()

     @staticmethod
diff --git a/airflow/jobs/base_job.py b/airflow/jobs/base_job.py
index a6b418a228a27..120acacdc1085 100644
--- a/airflow/jobs/base_job.py
+++ b/airflow/jobs/base_job.py
@@ -145,9 +145,7 @@ def is_alive(self, grace_multiplier=2.1):

     @provide_session
     def kill(self, session=None):
-        """
-        Handles on_kill callback and updates state in database.
-        """
+        """Handles on_kill callback and updates state in database."""
         job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
         job.end_date = timezone.utcnow()
         try:
@@ -159,14 +157,10 @@ def kill(self, session=None):
             raise AirflowException("Job shut down externally.")

     def on_kill(self):
-        """
-        Will be called when an external kill command is received
-        """
+        """Will be called when an external kill command is received"""

     def heartbeat_callback(self, session=None):
-        """
-        Callback that is called during heartbeat. This method should be overwritten.
-        """
+        """Callback that is called during heartbeat. This method should be overwritten."""

     def heartbeat(self, only_if_necessary: bool = False):
         """
@@ -238,9 +232,7 @@ def heartbeat(self, only_if_necessary: bool = False):
             self.latest_heartbeat = previous_heartbeat

     def run(self):
-        """
-        Starts the job.
-        """
+        """Starts the job."""
         Stats.incr(self.__class__.__name__.lower() + '_start', 1, 1)
         # Adding an entry in the DB
         with create_session() as session:
diff --git a/airflow/jobs/local_task_job.py b/airflow/jobs/local_task_job.py
index 35dd8970e5bdf..0c3e86215c44e 100644
--- a/airflow/jobs/local_task_job.py
+++ b/airflow/jobs/local_task_job.py
@@ -34,9 +34,7 @@
 class LocalTaskJob(BaseJob):
-    """
-    LocalTaskJob runs a single task instance.
-    """
+    """LocalTaskJob runs a single task instance."""

     __mapper_args__ = {
         'polymorphic_identity': 'LocalTaskJob'
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index ac0abf9601c54..a331e7b870ddb 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -200,9 +200,7 @@ def _run_file_processor(
         result_channel.close()

     def start(self) -> None:
-        """
-        Launch the process and start processing the DAG.
-        """
+        """Launch the process and start processing the DAG."""
         start_method = self._get_multiprocessing_start_method()
         context = multiprocessing.get_context(start_method)
@@ -234,9 +232,7 @@ def start(self) -> None:
         self._parent_channel = _parent_channel

     def kill(self) -> None:
-        """
-        Kill the process launched to process the file, and ensure consistent state.
- """ + """Kill the process launched to process the file, and ensure consistent state.""" if self._process is None: raise AirflowException("Tried to kill before starting!") self._kill_process() @@ -789,16 +785,12 @@ def __init__( self.dagbag = DagBag(read_dags_from_db=True) def register_exit_signals(self) -> None: - """ - Register signals that stop child processes - """ + """Register signals that stop child processes""" signal.signal(signal.SIGINT, self._exit_gracefully) signal.signal(signal.SIGTERM, self._exit_gracefully) def _exit_gracefully(self, signum, frame) -> None: # pylint: disable=unused-argument - """ - Helper method to clean up processor_agent to avoid leaving orphan processes. - """ + """Helper method to clean up processor_agent to avoid leaving orphan processes.""" self.log.info("Exiting gracefully upon receiving signal %s", signum) if self.processor_agent: self.processor_agent.end() @@ -1218,9 +1210,7 @@ def _change_state_for_tasks_failed_to_execute(self, session: Session = None): @provide_session def _process_executor_events(self, session: Session = None) -> int: - """ - Respond to executor events. - """ + """Respond to executor events.""" if not self.processor_agent: raise ValueError("Processor agent is not started.") ti_primary_key_to_try_number_map: Dict[Tuple[str, str, datetime.datetime], int] = {} @@ -1344,9 +1334,7 @@ def _create_dag_file_processor( dag_ids: Optional[List[str]], pickle_dags: bool ) -> DagFileProcessorProcess: - """ - Creates DagFileProcessorProcess instance. - """ + """Creates DagFileProcessorProcess instance.""" return DagFileProcessorProcess( file_path=file_path, pickle_dags=pickle_dags, diff --git a/airflow/kubernetes/k8s_model.py b/airflow/kubernetes/k8s_model.py index 2eb72bbcb06d5..c604cb3fa81b5 100644 --- a/airflow/kubernetes/k8s_model.py +++ b/airflow/kubernetes/k8s_model.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Classes for interacting with Kubernetes API. -""" +"""Classes for interacting with Kubernetes API.""" from abc import ABC, abstractmethod from functools import reduce from typing import List, Optional diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py index 10c26d42bae10..e7fb151a50bc8 100644 --- a/airflow/kubernetes/pod_generator.py +++ b/airflow/kubernetes/pod_generator.py @@ -45,9 +45,7 @@ class PodDefaults: - """ - Static defaults for Pods - """ + """Static defaults for Pods""" XCOM_MOUNT_PATH = '/airflow/xcom' SIDECAR_CONTAINER_NAME = 'airflow-xcom-sidecar' diff --git a/airflow/kubernetes/pod_generator_deprecated.py b/airflow/kubernetes/pod_generator_deprecated.py index 8792de99fe8ab..cdf9a9182b99c 100644 --- a/airflow/kubernetes/pod_generator_deprecated.py +++ b/airflow/kubernetes/pod_generator_deprecated.py @@ -34,9 +34,7 @@ class PodDefaults: - """ - Static defaults for Pods - """ + """Static defaults for Pods""" XCOM_MOUNT_PATH = '/airflow/xcom' SIDECAR_CONTAINER_NAME = 'airflow-xcom-sidecar' diff --git a/airflow/kubernetes/pod_runtime_info_env.py b/airflow/kubernetes/pod_runtime_info_env.py index 7477d0f4526f7..57ac8de4bd33d 100644 --- a/airflow/kubernetes/pod_runtime_info_env.py +++ b/airflow/kubernetes/pod_runtime_info_env.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-""" -Classes for interacting with Kubernetes API -""" +"""Classes for interacting with Kubernetes API""" import copy from kubernetes.client import models as k8s @@ -41,9 +39,7 @@ def __init__(self, name, field_path): self.field_path = field_path def to_k8s_client_obj(self) -> k8s.V1EnvVar: - """ - :return: kubernetes.client.models.V1EnvVar - """ + """:return: kubernetes.client.models.V1EnvVar""" return k8s.V1EnvVar( name=self.name, value_from=k8s.V1EnvVarSource( diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py index 2885b76f12d0d..e76bf5ae58637 100644 --- a/airflow/kubernetes/refresh_config.py +++ b/airflow/kubernetes/refresh_config.py @@ -67,9 +67,7 @@ def _load_from_exec_plugin(self): logging.error(str(e)) def refresh_api_key(self, client_configuration): - """ - Refresh API key if expired - """ + """Refresh API key if expired""" if self.api_key_expire_ts and time.time() >= self.api_key_expire_ts: self.load_and_set(client_configuration) diff --git a/airflow/kubernetes/secret.py b/airflow/kubernetes/secret.py index d799fe2a5663a..c31b749a29ce4 100644 --- a/airflow/kubernetes/secret.py +++ b/airflow/kubernetes/secret.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Classes for interacting with Kubernetes API -""" +"""Classes for interacting with Kubernetes API""" import copy import uuid from typing import Tuple diff --git a/airflow/lineage/__init__.py b/airflow/lineage/__init__.py index 1230acd410e17..7641f39489ede 100644 --- a/airflow/lineage/__init__.py +++ b/airflow/lineage/__init__.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Provides lineage support functions -""" +"""Provides lineage support functions""" import json import logging from functools import wraps @@ -41,9 +39,7 @@ @attr.s(auto_attribs=True) class Metadata: - """ - Class for serialized entities. - """ + """Class for serialized entities.""" type_name: str = attr.ib() source: str = attr.ib() @@ -51,26 +47,20 @@ class Metadata: def _get_instance(meta: Metadata): - """ - Instantiate an object from Metadata - """ + """Instantiate an object from Metadata""" cls = import_string(meta.type_name) return structure(meta.data, cls) def _render_object(obj: Any, context) -> Any: - """ - Renders a attr annotated object. Will set non serializable attributes to none - """ + """Renders a attr annotated object. Will set non serializable attributes to none""" return structure(json.loads(ENV.from_string( json.dumps(unstructure(obj), default=lambda o: None) ).render(**context).encode('utf-8')), type(obj)) def _to_dataset(obj: Any, source: str) -> Optional[Metadata]: - """ - Create Metadata from attr annotated object - """ + """Create Metadata from attr annotated object""" if not attr.has(obj): return None diff --git a/airflow/lineage/entities.py b/airflow/lineage/entities.py index e4570f336dc9a..9ded4ff67c935 100644 --- a/airflow/lineage/entities.py +++ b/airflow/lineage/entities.py @@ -27,9 +27,7 @@ @attr.s(auto_attribs=True) class File: - """ - File entity. Refers to a file - """ + """File entity. Refers to a file""" url: str = attr.ib() type_hint: Optional[str] = None @@ -37,9 +35,7 @@ class File: @attr.s(auto_attribs=True, kw_only=True) class User: - """ - User entity. Identifies a user - """ + """User entity. 
Identifies a user""" email: str = attr.ib() first_name: Optional[str] = None @@ -48,18 +44,14 @@ class User: @attr.s(auto_attribs=True, kw_only=True) class Tag: - """ - Tag or classification entity. - """ + """Tag or classification entity.""" tag_name: str = attr.ib() @attr.s(auto_attribs=True, kw_only=True) class Column: - """ - Column of a Table - """ + """Column of a Table""" name: str = attr.ib() description: Optional[str] = None @@ -77,9 +69,7 @@ def default_if_none(arg: Optional[bool]) -> bool: # noqa: D103 @attr.s(auto_attribs=True, kw_only=True) class Table: - """ - Table entity - """ + """Table entity""" database: str = attr.ib() cluster: str = attr.ib() diff --git a/airflow/migrations/versions/004c1210f153_increase_queue_name_size_limit.py b/airflow/migrations/versions/004c1210f153_increase_queue_name_size_limit.py index 76245097b4946..b7253686c3acc 100644 --- a/airflow/migrations/versions/004c1210f153_increase_queue_name_size_limit.py +++ b/airflow/migrations/versions/004c1210f153_increase_queue_name_size_limit.py @@ -45,9 +45,7 @@ def upgrade(): def downgrade(): - """ - Revert column size from 256 to 50 characters, might result in data loss. - """ + """Revert column size from 256 to 50 characters, might result in data loss.""" # use batch_alter_table to support SQLite workaround with op.batch_alter_table('task_instance') as batch_op: batch_op.alter_column('queue', type_=sa.String(50)) diff --git a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py index b76491b3357ea..c2898b86423a1 100644 --- a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py +++ b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py @@ -38,7 +38,8 @@ def upgrade(): """ - Recreate RenderedTaskInstanceFields table changing timestamp to datetime2(6) when using MSSQL as backend + Recreate RenderedTaskInstanceFields table changing timestamp to datetime2(6) when using MSSQL as + backend """ conn = op.get_bind() if conn.dialect.name == "mssql": @@ -57,7 +58,8 @@ def upgrade(): def downgrade(): """ - Recreate RenderedTaskInstanceFields table changing datetime2(6) to timestamp when using MSSQL as backend + Recreate RenderedTaskInstanceFields table changing datetime2(6) to timestamp when using MSSQL as + backend """ conn = op.get_bind() if conn.dialect.name == "mssql": diff --git a/airflow/migrations/versions/6e96a59344a4_make_taskinstance_pool_not_nullable.py b/airflow/migrations/versions/6e96a59344a4_make_taskinstance_pool_not_nullable.py index ae7ac8561ef94..8526a9a7446c6 100644 --- a/airflow/migrations/versions/6e96a59344a4_make_taskinstance_pool_not_nullable.py +++ b/airflow/migrations/versions/6e96a59344a4_make_taskinstance_pool_not_nullable.py @@ -82,9 +82,7 @@ class TaskInstance(Base): # type: ignore def upgrade(): - """ - Make TaskInstance.pool field not nullable. - """ + """Make TaskInstance.pool field not nullable.""" with create_session() as session: session.query(TaskInstance) \ .filter(TaskInstance.pool.is_(None)) \ @@ -109,9 +107,7 @@ def upgrade(): def downgrade(): - """ - Make TaskInstance.pool field nullable. 
- """ + """Make TaskInstance.pool field nullable.""" conn = op.get_bind() if conn.dialect.name == "mssql": op.drop_index('ti_pool', table_name='task_instance') diff --git a/airflow/migrations/versions/74effc47d867_change_datetime_to_datetime2_6_on_mssql_.py b/airflow/migrations/versions/74effc47d867_change_datetime_to_datetime2_6_on_mssql_.py index 4e49e82aea198..3a6746d1bdba7 100644 --- a/airflow/migrations/versions/74effc47d867_change_datetime_to_datetime2_6_on_mssql_.py +++ b/airflow/migrations/versions/74effc47d867_change_datetime_to_datetime2_6_on_mssql_.py @@ -37,9 +37,7 @@ def upgrade(): - """ - Change datetime to datetime2(6) when using MSSQL as backend - """ + """Change datetime to datetime2(6) when using MSSQL as backend""" conn = op.get_bind() if conn.dialect.name == "mssql": result = conn.execute( @@ -130,9 +128,7 @@ def upgrade(): def downgrade(): - """ - Change datetime2(6) back to datetime - """ + """Change datetime2(6) back to datetime""" conn = op.get_bind() if conn.dialect.name == "mssql": result = conn.execute( diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 8965d7046c37c..495843c8f9848 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Base operator for all operators. -""" +"""Base operator for all operators.""" import abc import copy import functools @@ -68,9 +66,7 @@ class BaseOperatorMeta(abc.ABCMeta): - """ - Base metaclass of BaseOperator. - """ + """Base metaclass of BaseOperator.""" def __call__(cls, *args, **kwargs): """ @@ -570,34 +566,24 @@ def __setattr__(self, key, value): self.set_xcomargs_dependencies() def add_inlets(self, inlets: Iterable[Any]): - """ - Sets inlets to this operator - """ + """Sets inlets to this operator""" self._inlets.extend(inlets) def add_outlets(self, outlets: Iterable[Any]): - """ - Defines the outlets of this operator - """ + """Defines the outlets of this operator""" self._outlets.extend(outlets) def get_inlet_defs(self): - """ - :return: list of inlets defined for this operator - """ + """:return: list of inlets defined for this operator""" return self._inlets def get_outlet_defs(self): - """ - :return: list of outlets defined for this operator - """ + """:return: list of outlets defined for this operator""" return self._outlets @property def dag(self) -> Any: - """ - Returns the Operator's DAG if set, otherwise raises an error - """ + """Returns the Operator's DAG if set, otherwise raises an error""" if self.has_dag(): return self._dag else: @@ -628,9 +614,7 @@ def dag(self, dag: Any): self._dag = dag def has_dag(self): - """ - Returns True if the Operator has been assigned to a DAG. - """ + """Returns True if the Operator has been assigned to a DAG.""" return getattr(self, '_dag', None) is not None @property @@ -765,9 +749,7 @@ def global_operator_extra_link_dict(self) -> Dict[str, Any]: @prepare_lineage def pre_execute(self, context: Any): - """ - This hook is triggered right before self.execute() is called. - """ + """This hook is triggered right before self.execute() is called.""" def execute(self, context: Any): """ @@ -1031,9 +1013,7 @@ def get_task_instances(self, start_date: Optional[datetime] = None, def get_flat_relative_ids(self, upstream: bool = False, found_descendants: Optional[Set[str]] = None) -> Set[str]: - """ - Get a flat set of relatives' ids, either upstream or downstream. 
- """ + """Get a flat set of relatives' ids, either upstream or downstream.""" if not self._dag: return set() @@ -1051,9 +1031,7 @@ def get_flat_relative_ids(self, return found_descendants def get_flat_relatives(self, upstream: bool = False): - """ - Get a flat list of relatives, either upstream or downstream. - """ + """Get a flat list of relatives, either upstream or downstream.""" if not self._dag: return set() from airflow.models.dag import DAG @@ -1068,9 +1046,7 @@ def run( ignore_first_depends_on_past: bool = True, ignore_ti_state: bool = False, mark_success: bool = False) -> None: - """ - Run a set of task instances for a date range. - """ + """Run a set of task instances for a date range.""" start_date = start_date or self.start_date end_date = end_date or self.end_date or timezone.utcnow() @@ -1320,10 +1296,7 @@ def get_serialized_fields(cls): return cls.__serialized_fields def is_smart_sensor_compatible(self): - """ - Return if this operator can use smart service. Default False. - - """ + """Return if this operator can use smart service. Default False.""" return False @@ -1419,9 +1392,7 @@ def cross_downstream(from_tasks: Sequence[BaseOperator], @attr.s(auto_attribs=True) class BaseOperatorLink(metaclass=ABCMeta): - """ - Abstract base class that defines how we get an operator link. - """ + """Abstract base class that defines how we get an operator link.""" operators: ClassVar[List[Type[BaseOperator]]] = [] """ diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 45bd398dc4030..71f936d4339f8 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -630,9 +630,7 @@ def get_run_dates(self, start_date, end_date=None): return run_dates def normalize_schedule(self, dttm): - """ - Returns dttm + interval unless dttm is first interval then it returns dttm - """ + """Returns dttm + interval unless dttm is first interval then it returns dttm""" following = self.following_schedule(dttm) # in case of @once @@ -715,9 +713,7 @@ def task_group(self) -> "TaskGroup": @property def filepath(self) -> str: - """ - File location of where the dag object is instantiated - """ + """File location of where the dag object is instantiated""" fn = self.full_filepath.replace(settings.DAGS_FOLDER + '/', '') fn = fn.replace(os.path.dirname(__file__) + '/', '') return fn @@ -756,9 +752,7 @@ def get_concurrency_reached(self, session=None) -> bool: @property def concurrency_reached(self): - """ - This attribute is deprecated. Please use `airflow.models.DAG.get_concurrency_reached` method. - """ + """This attribute is deprecated. Please use `airflow.models.DAG.get_concurrency_reached` method.""" warnings.warn( "This attribute is deprecated. Please use `airflow.models.DAG.get_concurrency_reached` method.", DeprecationWarning, @@ -768,18 +762,14 @@ def concurrency_reached(self): @provide_session def get_is_paused(self, session=None): - """ - Returns a boolean indicating whether this DAG is paused - """ + """Returns a boolean indicating whether this DAG is paused""" qry = session.query(DagModel).filter( DagModel.dag_id == self.dag_id) return qry.value(DagModel.is_paused) @property def is_paused(self): - """ - This attribute is deprecated. Please use `airflow.models.DAG.get_is_paused` method. - """ + """This attribute is deprecated. Please use `airflow.models.DAG.get_is_paused` method.""" warnings.warn( "This attribute is deprecated. 
Please use `airflow.models.DAG.get_is_paused` method.", DeprecationWarning, @@ -911,18 +901,14 @@ def get_dagruns_between(self, start_date, end_date, session=None): @provide_session def get_latest_execution_date(self, session=None): - """ - Returns the latest date for which at least one dag run exists - """ + """Returns the latest date for which at least one dag run exists""" return session.query(func.max(DagRun.execution_date)).filter( DagRun.dag_id == self.dag_id ).scalar() @property def latest_execution_date(self): - """ - This attribute is deprecated. Please use `airflow.models.DAG.get_latest_execution_date` method. - """ + """This attribute is deprecated. Please use `airflow.models.DAG.get_latest_execution_date` method.""" warnings.warn( "This attribute is deprecated. Please use `airflow.models.DAG.get_latest_execution_date` method.", DeprecationWarning, @@ -932,9 +918,7 @@ def latest_execution_date(self): @property def subdags(self): - """ - Returns a list of the subdag objects associated to this DAG - """ + """Returns a list of the subdag objects associated to this DAG""" # Check SubDag for class but don't check class directly from airflow.operators.subdag_operator import SubDagOperator subdag_lst = [] @@ -1636,9 +1620,7 @@ def run( job.run() def cli(self): - """ - Exposes a CLI specific to this DAG - """ + """Exposes a CLI specific to this DAG""" from airflow.cli import cli_parser parser = cli_parser.get_parser(dag_parser=True) args = parser.parse_args() @@ -1947,9 +1929,7 @@ def get_serialized_fields(cls): class DagTag(Base): - """ - A tag name per dag, to allow quick filtering in the DAG view. - """ + """A tag name per dag, to allow quick filtering in the DAG view.""" __tablename__ = "dag_tag" name = Column(String(100), primary_key=True) diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index a2fc4bd4bdf93..fd428f8e34d12 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -48,9 +48,7 @@ class FileLoadStat(NamedTuple): - """ - Information about single file - """ + """Information about single file""" file: str duration: timedelta @@ -130,9 +128,7 @@ def __init__( safe_mode=safe_mode) def size(self) -> int: - """ - :return: the amount of dags contained in this dagbag - """ + """:return: the amount of dags contained in this dagbag""" return len(self.dags) @property @@ -525,9 +521,7 @@ def dagbag_report(self): @provide_session def sync_to_db(self, session: Optional[Session] = None): - """ - Save attributes about list of DAG to the DB. 
- """ + """Save attributes about list of DAG to the DB.""" # To avoid circular import - airflow.models.dagbag -> airflow.models.dag -> airflow.models.dagbag from airflow.models.dag import DAG from airflow.models.serialized_dag import SerializedDagModel diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 2108f45247a2a..9e454005c7bff 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -274,9 +274,7 @@ def generate_run_id(run_type: DagRunType, execution_date: datetime) -> str: @provide_session def get_task_instances(self, state=None, session=None): - """ - Returns the task instances for this dag run - """ + """Returns the task instances for this dag run""" tis = session.query(TI).filter( TI.dag_id == self.dag_id, TI.execution_date == self.execution_date, diff --git a/airflow/models/log.py b/airflow/models/log.py index 0c6236a5b9304..8f12c63f514ad 100644 --- a/airflow/models/log.py +++ b/airflow/models/log.py @@ -24,9 +24,7 @@ class Log(Base): - """ - Used to actively log events to the database - """ + """Used to actively log events to the database""" __tablename__ = "log" diff --git a/airflow/models/pool.py b/airflow/models/pool.py index 60fa9264e510b..541a464c076cb 100644 --- a/airflow/models/pool.py +++ b/airflow/models/pool.py @@ -40,9 +40,7 @@ class PoolStats(TypedDict): class Pool(Base): - """ - the class to get Pool info. - """ + """the class to get Pool info.""" __tablename__ = "slot_pool" diff --git a/airflow/models/renderedtifields.py b/airflow/models/renderedtifields.py index 515fb68756655..661b0a8ac219e 100644 --- a/airflow/models/renderedtifields.py +++ b/airflow/models/renderedtifields.py @@ -32,9 +32,7 @@ class RenderedTaskInstanceFields(Base): - """ - Save Rendered Template Fields - """ + """Save Rendered Template Fields""" __tablename__ = "rendered_task_instance_fields" diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py index dc80990d4c0b6..43033310e1ae4 100644 --- a/airflow/models/skipmixin.py +++ b/airflow/models/skipmixin.py @@ -38,9 +38,7 @@ class SkipMixin(LoggingMixin): """A Mixin to skip Tasks Instances""" def _set_state_to_skipped(self, dag_run, execution_date, tasks, session): - """ - Used internally to set state of task instances to skipped from the same dag run. - """ + """Used internally to set state of task instances to skipped from the same dag run.""" task_ids = [d.task_id for d in tasks] now = timezone.utcnow() diff --git a/airflow/models/taskfail.py b/airflow/models/taskfail.py index d35f347644798..5b3979203d617 100644 --- a/airflow/models/taskfail.py +++ b/airflow/models/taskfail.py @@ -23,9 +23,7 @@ class TaskFail(Base): - """ - TaskFail tracks the failed run durations of each task instance. - """ + """TaskFail tracks the failed run durations of each task instance.""" __tablename__ = "task_fail" diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index ea22b01a8dfaf..14d46ed21269f 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -152,9 +152,7 @@ def clear_task_instances(tis, class TaskInstanceKey(NamedTuple): - """ - Key used to identify task instance. 
- """ + """Key used to identify task instance.""" dag_id: str task_id: str @@ -163,24 +161,18 @@ class TaskInstanceKey(NamedTuple): @property def primary(self) -> Tuple[str, str, datetime]: - """ - Return task instance primary key part of the key - """ + """Return task instance primary key part of the key""" return self.dag_id, self.task_id, self.execution_date @property def reduced(self) -> 'TaskInstanceKey': - """ - Remake the key by subtracting 1 from try number to match in memory information - """ + """Remake the key by subtracting 1 from try number to match in memory information""" return TaskInstanceKey( self.dag_id, self.task_id, self.execution_date, max(1, self.try_number - 1) ) def with_try_number(self, try_number: int) -> 'TaskInstanceKey': - """ - Returns TaskInstanceKey with provided ``try_number`` - """ + """Returns TaskInstanceKey with provided ``try_number``""" return TaskInstanceKey( self.dag_id, self.task_id, self.execution_date, try_number ) @@ -604,9 +596,7 @@ def clear_xcom_data(self, session=None): @property def key(self) -> TaskInstanceKey: - """ - Returns a tuple that identifies the task instance uniquely - """ + """Returns a tuple that identifies the task instance uniquely""" return TaskInstanceKey(self.dag_id, self.task_id, self.execution_date, self.try_number) @provide_session @@ -1152,9 +1142,7 @@ def _prepare_and_execute_task_with_callbacks( self, context, task): - """ - Prepare Task for Execution - """ + """Prepare Task for Execution""" from airflow.models.renderedtifields import RenderedTaskInstanceFields task_copy = task.prepare_for_execution() @@ -1822,9 +1810,7 @@ def get_num_running_task_instances(self, session): ).scalar() def init_run_context(self, raw=False): - """ - Sets the log context. - """ + """Sets the log context.""" self.raw = raw self._set_context(self) diff --git a/airflow/models/taskmixin.py b/airflow/models/taskmixin.py index cfdc714f824ce..f8ebcd7af529c 100644 --- a/airflow/models/taskmixin.py +++ b/airflow/models/taskmixin.py @@ -40,16 +40,12 @@ def leaves(self): @abstractmethod def set_upstream(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Set a task or a task list to be directly upstream from the current task. - """ + """Set a task or a task list to be directly upstream from the current task.""" raise NotImplementedError() @abstractmethod def set_downstream(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Set a task or a task list to be directly downstream from the current task. - """ + """Set a task or a task list to be directly downstream from the current task.""" raise NotImplementedError() def update_relative(self, other: "TaskMixin", upstream=True) -> None: @@ -59,29 +55,21 @@ def update_relative(self, other: "TaskMixin", upstream=True) -> None: """ def __lshift__(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Implements Task << Task - """ + """Implements Task << Task""" self.set_upstream(other) return other def __rshift__(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Implements Task >> Task - """ + """Implements Task >> Task""" self.set_downstream(other) return other def __rrshift__(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Called for Task >> [Task] because list don't have __rshift__ operators. 
- """ + """Called for Task >> [Task] because list don't have __rshift__ operators.""" self.__lshift__(other) return self def __rlshift__(self, other: Union["TaskMixin", Sequence["TaskMixin"]]): - """ - Called for Task << [Task] because list don't have __lshift__ operators. - """ + """Called for Task << [Task] because list don't have __lshift__ operators.""" self.__rshift__(other) return self diff --git a/airflow/models/taskreschedule.py b/airflow/models/taskreschedule.py index 03c5e029d55a2..88e18cdea73c2 100644 --- a/airflow/models/taskreschedule.py +++ b/airflow/models/taskreschedule.py @@ -24,9 +24,7 @@ class TaskReschedule(Base): - """ - TaskReschedule tracks rescheduled task instances. - """ + """TaskReschedule tracks rescheduled task instances.""" __tablename__ = "task_reschedule" diff --git a/airflow/models/variable.py b/airflow/models/variable.py index b3cc01bac1700..d180628443793 100644 --- a/airflow/models/variable.py +++ b/airflow/models/variable.py @@ -55,9 +55,7 @@ def __repr__(self): return '{} : {}'.format(self.key, self._val) def get_val(self): - """ - Get Airflow Variable from Metadata DB and decode it using the Fernet Key - """ + """Get Airflow Variable from Metadata DB and decode it using the Fernet Key""" if self._val is not None and self.is_encrypted: try: fernet = get_fernet() @@ -72,9 +70,7 @@ def get_val(self): return self._val def set_val(self, value): - """ - Encode the specified value with Fernet Key and store it in Variables Table. - """ + """Encode the specified value with Fernet Key and store it in Variables Table.""" if value is not None: fernet = get_fernet() self._val = fernet.encrypt(bytes(value, 'utf-8')).decode() @@ -82,9 +78,7 @@ def set_val(self, value): @declared_attr def val(cls): # pylint: disable=no-self-argument - """ - Get Airflow Variable from Metadata DB and decode it using the Fernet Key - """ + """Get Airflow Variable from Metadata DB and decode it using the Fernet Key""" return synonym('_val', descriptor=property(cls.get_val, cls.set_val)) @classmethod diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py index 6b0068f1166ce..2b5c67eb9cbe6 100644 --- a/airflow/models/xcom.py +++ b/airflow/models/xcom.py @@ -43,9 +43,7 @@ class BaseXCom(Base, LoggingMixin): - """ - Base class for XCom objects. - """ + """Base class for XCom objects.""" __tablename__ = "xcom" diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py index 70be294404d3f..aa29864fbe351 100644 --- a/airflow/models/xcom_arg.py +++ b/airflow/models/xcom_arg.py @@ -67,9 +67,7 @@ def __eq__(self, other): and self.key == other.key) def __getitem__(self, item): - """ - Implements xcomresult['some_result_key'] - """ + """Implements xcomresult['some_result_key']""" return XComArg(operator=self.operator, key=item) def __str__(self): @@ -113,15 +111,11 @@ def key(self) -> str: return self._key def set_upstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]]): - """ - Proxy to underlying operator set_upstream method. Required by TaskMixin. - """ + """Proxy to underlying operator set_upstream method. Required by TaskMixin.""" self.operator.set_upstream(task_or_task_list) def set_downstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]]): - """ - Proxy to underlying operator set_downstream method. Required by TaskMixin. - """ + """Proxy to underlying operator set_downstream method. 
Required by TaskMixin.""" self.operator.set_downstream(task_or_task_list) def resolve(self, context: Dict) -> Any: diff --git a/airflow/operators/dagrun_operator.py b/airflow/operators/dagrun_operator.py index dc234971c7e7c..d261bfc44cd2f 100644 --- a/airflow/operators/dagrun_operator.py +++ b/airflow/operators/dagrun_operator.py @@ -63,9 +63,7 @@ class TriggerDagRunOperator(BaseOperator): @property def operator_extra_links(self): - """ - Return operator extra links - """ + """Return operator extra links""" return [TriggerDagRunLink()] @apply_defaults diff --git a/airflow/operators/latest_only_operator.py b/airflow/operators/latest_only_operator.py index 7096aaba54813..af6c90e19397e 100644 --- a/airflow/operators/latest_only_operator.py +++ b/airflow/operators/latest_only_operator.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.operators.latest_only`. -""" +"""This module is deprecated. Please use `airflow.operators.latest_only`""" import warnings # pylint: disable=unused-import diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py index a9d8a371428d7..60408dd7bc7af 100644 --- a/airflow/operators/sql.py +++ b/airflow/operators/sql.py @@ -469,9 +469,7 @@ def push(self, meta_data): self.log.info("Log from %s:\n%s", self.dag_id, info) def get_db_hook(self): - """ - Returns DB hook - """ + """Returns DB hook""" return BaseHook.get_hook(conn_id=self.conn_id) diff --git a/airflow/operators/subdag_operator.py b/airflow/operators/subdag_operator.py index f9132c47fd284..96a18f7c5b193 100644 --- a/airflow/operators/subdag_operator.py +++ b/airflow/operators/subdag_operator.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -The module which provides a way to nest your DAGs and so your levels of complexity. -""" +"""The module which provides a way to nest your DAGs and so your levels of complexity.""" from enum import Enum from typing import Dict, Optional @@ -37,9 +35,7 @@ class SkippedStatePropagationOptions(Enum): - """ - Available options for skipped state propagation of subdag's tasks to parent dag tasks. - """ + """Available options for skipped state propagation of subdag's tasks to parent dag tasks.""" ALL_LEAVES = 'all_leaves' ANY_LEAF = 'any_leaf' diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index a7beeec7a88d5..f6fd08b6c0e2e 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -197,9 +197,7 @@ def load_entrypoint_plugins(): def load_plugins_from_plugin_directory(): - """ - Load and register Airflow Plugins from plugins directory - """ + """Load and register Airflow Plugins from plugins directory""" global import_errors # pylint: disable=global-statement global plugins # pylint: disable=global-statement log.debug("Loading plugins from directory: %s", settings.PLUGINS_FOLDER) diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/airflow/providers/amazon/aws/hooks/athena.py index 3120108c6b876..c7037fbbfc90c 100644 --- a/airflow/providers/amazon/aws/hooks/athena.py +++ b/airflow/providers/amazon/aws/hooks/athena.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-""" -This module contains AWS Athena hook -""" +"""This module contains AWS Athena hook""" from time import sleep from typing import Any, Dict, Optional diff --git a/airflow/providers/amazon/aws/hooks/cloud_formation.py b/airflow/providers/amazon/aws/hooks/cloud_formation.py index d09bd94b578b4..610cded6fec9b 100644 --- a/airflow/providers/amazon/aws/hooks/cloud_formation.py +++ b/airflow/providers/amazon/aws/hooks/cloud_formation.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains AWS CloudFormation Hook -""" +"""This module contains AWS CloudFormation Hook""" from typing import Optional, Union from botocore.exceptions import ClientError @@ -42,9 +40,7 @@ def __init__(self, *args, **kwargs): super().__init__(client_type='cloudformation', *args, **kwargs) def get_stack_status(self, stack_name: Union[client, resource]) -> Optional[dict]: - """ - Get stack status from CloudFormation. - """ + """Get stack status from CloudFormation.""" self.log.info('Poking for stack %s', stack_name) try: diff --git a/airflow/providers/amazon/aws/hooks/datasync.py b/airflow/providers/amazon/aws/hooks/datasync.py index 9f4efca9c14a7..e7c0e3aad167d 100644 --- a/airflow/providers/amazon/aws/hooks/datasync.py +++ b/airflow/providers/amazon/aws/hooks/datasync.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -Interact with AWS DataSync, using the AWS ``boto3`` library. -""" +"""Interact with AWS DataSync, using the AWS ``boto3`` library.""" import time from typing import List, Optional diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/airflow/providers/amazon/aws/hooks/dynamodb.py index 475043622959b..a829f8de0a14f 100644 --- a/airflow/providers/amazon/aws/hooks/dynamodb.py +++ b/airflow/providers/amazon/aws/hooks/dynamodb.py @@ -17,9 +17,7 @@ # under the License. -""" -This module contains the AWS DynamoDB hook -""" +"""This module contains the AWS DynamoDB hook""" from typing import Iterable, List, Optional from airflow.exceptions import AirflowException @@ -51,9 +49,7 @@ def __init__( super().__init__(*args, **kwargs) def write_batch_data(self, items: Iterable) -> bool: - """ - Write batch items to DynamoDB table with provisioned throughout capacity. 
- """ + """Write batch items to DynamoDB table with provisioned throughout capacity.""" try: table = self.get_conn().Table(self.table_name) diff --git a/airflow/providers/amazon/aws/hooks/glacier.py b/airflow/providers/amazon/aws/hooks/glacier.py index 756b6e9e85113..8b2f239f162e5 100644 --- a/airflow/providers/amazon/aws/hooks/glacier.py +++ b/airflow/providers/amazon/aws/hooks/glacier.py @@ -22,9 +22,7 @@ class GlacierHook(AwsBaseHook): - """ - Hook for connection with Amazon Glacier - """ + """Hook for connection with Amazon Glacier""" def __init__(self, aws_conn_id: str = "aws_default") -> None: super().__init__(client_type="glacier") diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py index dde6362f8b145..8bc2e72a0cd47 100644 --- a/airflow/providers/amazon/aws/hooks/glue.py +++ b/airflow/providers/amazon/aws/hooks/glue.py @@ -77,16 +77,12 @@ def __init__( super().__init__(*args, **kwargs) def list_jobs(self) -> List: - """ - :return: Lists of Jobs - """ + """:return: Lists of Jobs""" conn = self.get_conn() return conn.get_jobs() def get_iam_execution_role(self) -> Dict: - """ - :return: iam role for job execution - """ + """:return: iam role for job execution""" iam_client = self.get_client_type('iam', self.region_name) try: diff --git a/airflow/providers/amazon/aws/hooks/glue_catalog.py b/airflow/providers/amazon/aws/hooks/glue_catalog.py index cdb7aab8a4205..65d44666bc0f0 100644 --- a/airflow/providers/amazon/aws/hooks/glue_catalog.py +++ b/airflow/providers/amazon/aws/hooks/glue_catalog.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains AWS Glue Catalog Hook -""" +"""This module contains AWS Glue Catalog Hook""" from typing import Set, Optional from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/airflow/providers/amazon/aws/hooks/kinesis.py b/airflow/providers/amazon/aws/hooks/kinesis.py index 7735de82cfb1c..fc4b9afebb4fd 100644 --- a/airflow/providers/amazon/aws/hooks/kinesis.py +++ b/airflow/providers/amazon/aws/hooks/kinesis.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains AWS Firehose hook -""" +"""This module contains AWS Firehose hook""" from typing import Iterable from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook @@ -44,9 +42,7 @@ def __init__(self, delivery_stream: str, *args, **kwargs) -> None: super().__init__(*args, **kwargs) def put_records(self, records: Iterable): - """ - Write batch records to Kinesis Firehose - """ + """Write batch records to Kinesis Firehose""" response = self.get_conn().put_record_batch(DeliveryStreamName=self.delivery_stream, Records=records) return response diff --git a/airflow/providers/amazon/aws/hooks/lambda_function.py b/airflow/providers/amazon/aws/hooks/lambda_function.py index 8b6ad50911bfa..6fedebef5cea3 100644 --- a/airflow/providers/amazon/aws/hooks/lambda_function.py +++ b/airflow/providers/amazon/aws/hooks/lambda_function.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-""" -This module contains AWS Lambda hook -""" +"""This module contains AWS Lambda hook""" from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook @@ -59,9 +57,7 @@ def __init__( super().__init__(*args, **kwargs) def invoke_lambda(self, payload: str) -> str: - """ - Invoke Lambda Function - """ + """Invoke Lambda Function""" response = self.conn.invoke( FunctionName=self.function_name, InvocationType=self.invocation_type, diff --git a/airflow/providers/amazon/aws/hooks/redshift.py b/airflow/providers/amazon/aws/hooks/redshift.py index c97708d5347ad..10157ebb0ef9c 100644 --- a/airflow/providers/amazon/aws/hooks/redshift.py +++ b/airflow/providers/amazon/aws/hooks/redshift.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Interact with AWS Redshift, using the boto3 library. -""" +"""Interact with AWS Redshift, using the boto3 library.""" from typing import List, Optional diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py index 26713acee5229..af403dcf7cede 100644 --- a/airflow/providers/amazon/aws/hooks/s3.py +++ b/airflow/providers/amazon/aws/hooks/s3.py @@ -17,9 +17,7 @@ # under the License. # pylint: disable=invalid-name -""" -Interact with AWS S3, using the boto3 library. -""" +"""Interact with AWS S3, using the boto3 library.""" import fnmatch import gzip as gz import io diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py index fdf934317c225..9009967f8ee5f 100644 --- a/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -584,9 +584,7 @@ def describe_training_job_with_log( last_description, last_describe_job_call, ): - """ - Return the training job info associated with job_name and print CloudWatch logs - """ + """Return the training job info associated with job_name and print CloudWatch logs""" log_group = '/aws/sagemaker/TrainingJobs' if len(stream_names) < instance_count: diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py index 3844b711bfcbe..5c7423f48377e 100644 --- a/airflow/providers/amazon/aws/hooks/ses.py +++ b/airflow/providers/amazon/aws/hooks/ses.py @@ -14,10 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - -""" -This module contains AWS SES Hook -""" +"""This module contains AWS SES Hook""" from typing import Any, Dict, Iterable, List, Optional, Union from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/airflow/providers/amazon/aws/hooks/sns.py index a019d5b25ab85..795e78a961723 100644 --- a/airflow/providers/amazon/aws/hooks/sns.py +++ b/airflow/providers/amazon/aws/hooks/sns.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains AWS SNS hook -""" +"""This module contains AWS SNS hook""" import json from typing import Optional, Union, Dict diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/airflow/providers/amazon/aws/hooks/sqs.py index c40a432b0fbd0..97fdff01d1d3a 100644 --- a/airflow/providers/amazon/aws/hooks/sqs.py +++ b/airflow/providers/amazon/aws/hooks/sqs.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-""" -This module contains AWS SQS hook -""" +"""This module contains AWS SQS hook""" from typing import Dict, Optional from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index 7d4e3a0703bde..5305784c6c07b 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -50,9 +50,7 @@ def __init__(self, base_log_folder, log_group_arn, filename_template): @cached_property def hook(self): - """ - Returns AwsLogsHook. - """ + """Returns AwsLogsHook.""" remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID') try: from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook @@ -79,9 +77,7 @@ def set_context(self, ti): ) def close(self): - """ - Close the handler responsible for the upload of the local log file to Cloudwatch. - """ + """Close the handler responsible for the upload of the local log file to Cloudwatch.""" # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 00f52d1b28666..8b32a2fa441aa 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -41,9 +41,7 @@ def __init__(self, base_log_folder, s3_log_folder, filename_template): @cached_property def hook(self): - """ - Returns S3Hook. - """ + """Returns S3Hook.""" remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID') try: from airflow.providers.amazon.aws.hooks.s3 import S3Hook @@ -71,9 +69,7 @@ def set_context(self, ti): pass def close(self): - """ - Close and upload local log file to remote storage S3. - """ + """Close and upload local log file to remote storage S3.""" # When application exit, system shuts down all handlers by # calling close method. 
Here we check if logger is already # closed to prevent uploading the log to remote storage multiple diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py index 7a8bc2af8c585..101ea169cace2 100644 --- a/airflow/providers/amazon/aws/operators/athena.py +++ b/airflow/providers/amazon/aws/operators/athena.py @@ -91,9 +91,7 @@ def hook(self) -> AWSAthenaHook: return AWSAthenaHook(self.aws_conn_id, sleep_time=self.sleep_time) def execute(self, context: dict) -> Optional[str]: - """ - Run Presto Query on Athena - """ + """Run Presto Query on Athena""" self.query_execution_context['Database'] = self.database self.result_configuration['OutputLocation'] = self.output_location self.query_execution_id = self.hook.run_query( @@ -123,9 +121,7 @@ def execute(self, context: dict) -> Optional[str]: return self.query_execution_id def on_kill(self) -> None: - """ - Cancel the submitted athena query - """ + """Cancel the submitted athena query""" if self.query_execution_id: self.log.info('Received a kill signal.') self.log.info('Stopping Query with executionId - %s', self.query_execution_id) diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/airflow/providers/amazon/aws/operators/cloud_formation.py index d6c9bb01dc57a..d6fa6549832d9 100644 --- a/airflow/providers/amazon/aws/operators/cloud_formation.py +++ b/airflow/providers/amazon/aws/operators/cloud_formation.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains CloudFormation create/delete stack operators. -""" +"""This module contains CloudFormation create/delete stack operators.""" from typing import List from airflow.models import BaseOperator diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py index bc0bdf62d2e62..9f2e9b3373e43 100644 --- a/airflow/providers/amazon/aws/operators/datasync.py +++ b/airflow/providers/amazon/aws/operators/datasync.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -Create, get, update, execute and delete an AWS DataSync Task. -""" +"""Create, get, update, execute and delete an AWS DataSync Task.""" import logging import random diff --git a/airflow/providers/amazon/aws/operators/s3_bucket.py b/airflow/providers/amazon/aws/operators/s3_bucket.py index 48207468e19ec..14d25cc6ba7d6 100644 --- a/airflow/providers/amazon/aws/operators/s3_bucket.py +++ b/airflow/providers/amazon/aws/operators/s3_bucket.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains AWS S3 operators. -""" +"""This module contains AWS S3 operators.""" from typing import Optional from airflow.models import BaseOperator diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/airflow/providers/amazon/aws/secrets/secrets_manager.py index 01204301428f2..e4786c52603e8 100644 --- a/airflow/providers/amazon/aws/secrets/secrets_manager.py +++ b/airflow/providers/amazon/aws/secrets/secrets_manager.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-""" -Objects relating to sourcing secrets from AWS Secrets Manager -""" +"""Objects relating to sourcing secrets from AWS Secrets Manager""" from typing import Optional @@ -82,9 +80,7 @@ def __init__( @cached_property def client(self): - """ - Create a Secrets Manager client - """ + """Create a Secrets Manager client""" session = boto3.session.Session( profile_name=self.profile_name, ) diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/airflow/providers/amazon/aws/secrets/systems_manager.py index 442845c9bfb68..c7b59ff5cb76f 100644 --- a/airflow/providers/amazon/aws/secrets/systems_manager.py +++ b/airflow/providers/amazon/aws/secrets/systems_manager.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Objects relating to sourcing connections from AWS SSM Parameter Store -""" +"""Objects relating to sourcing connections from AWS SSM Parameter Store""" from typing import Optional import boto3 @@ -71,9 +69,7 @@ def __init__( @cached_property def client(self): - """ - Create a SSM client - """ + """Create a SSM client""" session = boto3.Session(profile_name=self.profile_name) return session.client("ssm", **self.kwargs) diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/airflow/providers/amazon/aws/sensors/cloud_formation.py index 739a13331c570..5c66a0c3a2af2 100644 --- a/airflow/providers/amazon/aws/sensors/cloud_formation.py +++ b/airflow/providers/amazon/aws/sensors/cloud_formation.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains sensors for AWS CloudFormation. -""" +"""This module contains sensors for AWS CloudFormation.""" from airflow.providers.amazon.aws.hooks.cloud_formation import AWSCloudFormationHook from airflow.sensors.base_sensor_operator import BaseSensorOperator from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/airflow/providers/amazon/aws/sensors/glacier.py index 100af69bf19e5..35548bf31627b 100644 --- a/airflow/providers/amazon/aws/sensors/glacier.py +++ b/airflow/providers/amazon/aws/sensors/glacier.py @@ -25,9 +25,7 @@ class JobStatus(Enum): - """ - Glacier jobs description - """ + """Glacier jobs description""" IN_PROGRESS = "InProgress" SUCCEEDED = "Succeeded" diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py index f1df94d3c0663..72926267b61c5 100644 --- a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +++ b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py @@ -76,9 +76,7 @@ def __init__( self.hook = None def poke(self, context): - """ - Checks for existence of the partition in the AWS Glue Catalog table - """ + """Checks for existence of the partition in the AWS Glue Catalog table""" if '.' 
in self.table_name: self.database_name, self.table_name = self.table_name.split('.') self.log.info( @@ -88,9 +86,7 @@ def poke(self, context): return self.get_hook().check_for_partition(self.database_name, self.table_name, self.expression) def get_hook(self): - """ - Gets the AwsGlueCatalogHook - """ + """Gets the AwsGlueCatalogHook""" if not self.hook: self.hook = AwsGlueCatalogHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name) return self.hook diff --git a/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py b/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py index f1f3d4e00bf6c..2c9ff9970546a 100644 --- a/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py +++ b/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py @@ -103,9 +103,7 @@ def __init__( @cached_property def hook(self): - """ - Returns S3Hook. - """ + """Returns S3Hook.""" return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify) def is_keys_unchanged(self, current_objects: Set[str]) -> bool: diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/airflow/providers/amazon/aws/sensors/sqs.py index 2d1ab54ad1d3d..b6c467da4b160 100644 --- a/airflow/providers/amazon/aws/sensors/sqs.py +++ b/airflow/providers/amazon/aws/sensors/sqs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Reads and then deletes the message from SQS queue -""" +"""Reads and then deletes the message from SQS queue""" from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.sqs import SQSHook diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py index d474f310c2b12..48559f8588779 100644 --- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Storage to S3 operator. -""" +"""This module contains Google Cloud Storage to S3 operator.""" import warnings from typing import Iterable, Optional, Sequence, Union, Dict, List, cast diff --git a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py index a8ee7db14d030..4e44d97cacc0c 100644 --- a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module allows you to transfer data from any Google API endpoint into a S3 Bucket. -""" +"""This module allows you to transfer data from any Google API endpoint into an S3 bucket.""" import json import sys from typing import Optional, Sequence, Union diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py index 9a897198155fa..16568dba9ddf8 100644 --- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Hive to DynamoDB.
-""" +"""This module contains operator to move data from Hive to DynamoDB.""" import json from typing import Optional, Callable diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py index b303d42aef4da..015e7342c79ef 100644 --- a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module allows you to transfer mail attachments from a mail server into s3 bucket. -""" +"""This module allows you to transfer mail attachments from a mail server into s3 bucket.""" from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.providers.imap.hooks.imap import ImapHook diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py index 58edc93ae201e..b88087e291a51 100644 --- a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py @@ -72,9 +72,7 @@ def __init__( self.replace = replace def execute(self, context) -> bool: - """ - Executed by task_instance at runtime - """ + """Executed by task_instance at runtime""" s3_conn = S3Hook(self.s3_conn_id) # Grab collection and execute query according to whether or not it is a pipeline diff --git a/airflow/providers/amazon/aws/transfers/mysql_to_s3.py b/airflow/providers/amazon/aws/transfers/mysql_to_s3.py index 791a31d2b4c21..45b78f024c449 100644 --- a/airflow/providers/amazon/aws/transfers/mysql_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/mysql_to_s3.py @@ -102,9 +102,7 @@ def __init__( self.pd_csv_kwargs["header"] = header def _fix_int_dtypes(self, df: pd.DataFrame) -> None: - """ - Mutate DataFrame to set dtypes for int columns containing NaN values." - """ + """Mutate DataFrame to set dtypes for int columns containing NaN values.""" for col in df: if "float" in df[col].dtype.name and df[col].hasnans: # inspect values to determine if dtype of non-null values is int or float diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py index 9bc97936a6682..e2a4cc1314605 100644 --- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Transfers data from AWS Redshift into a S3 Bucket. -""" +"""Transfers data from AWS Redshift into a S3 Bucket.""" from typing import List, Optional, Union from airflow.models import BaseOperator diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/airflow/providers/apache/cassandra/hooks/cassandra.py index 9e52f08aea28d..0166aa376a9b0 100644 --- a/airflow/providers/apache/cassandra/hooks/cassandra.py +++ b/airflow/providers/apache/cassandra/hooks/cassandra.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains hook to integrate with Apache Cassandra. 
-""" +"""This module contains hook to integrate with Apache Cassandra.""" from typing import Any, Dict, Union @@ -122,24 +120,18 @@ def __init__(self, cassandra_conn_id: str = 'cassandra_default'): self.session = None def get_conn(self) -> Session: - """ - Returns a cassandra Session object - """ + """Returns a cassandra Session object""" if self.session and not self.session.is_shutdown: return self.session self.session = self.cluster.connect(self.keyspace) return self.session def get_cluster(self) -> Cluster: - """ - Returns Cassandra cluster. - """ + """Returns Cassandra cluster.""" return self.cluster def shutdown_cluster(self) -> None: - """ - Closes all sessions and connections associated with this Cluster. - """ + """Closes all sessions and connections associated with this Cluster.""" if not self.cluster.is_shutdown: self.cluster.shutdown() diff --git a/airflow/providers/apache/druid/hooks/druid.py b/airflow/providers/apache/druid/hooks/druid.py index b609c4aafe661..04add1c58ab87 100644 --- a/airflow/providers/apache/druid/hooks/druid.py +++ b/airflow/providers/apache/druid/hooks/druid.py @@ -62,9 +62,7 @@ def __init__( raise ValueError("Druid timeout should be equal or greater than 1") def get_conn_url(self) -> str: - """ - Get Druid connection url - """ + """Get Druid connection url""" conn = self.get_connection(self.druid_ingest_conn_id) host = conn.host port = conn.port @@ -89,9 +87,7 @@ def get_auth(self) -> Optional[requests.auth.HTTPBasicAuth]: return None def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None: - """ - Submit Druid ingestion job - """ + """Submit Druid ingestion job""" url = self.get_conn_url() self.log.info("Druid ingestion spec: %s", json_index_spec) @@ -147,9 +143,7 @@ class DruidDbApiHook(DbApiHook): supports_autocommit = False def get_conn(self) -> connect: - """ - Establish a connection to druid broker. - """ + """Establish a connection to druid broker.""" conn = self.get_connection(self.conn_name_attr) druid_broker_conn = connect( host=conn.host, diff --git a/airflow/providers/apache/druid/operators/druid_check.py b/airflow/providers/apache/druid/operators/druid_check.py index 12637880ac7cc..47da4544f4d76 100644 --- a/airflow/providers/apache/druid/operators/druid_check.py +++ b/airflow/providers/apache/druid/operators/druid_check.py @@ -65,9 +65,7 @@ def __init__( self.sql = sql def get_db_hook(self) -> DruidDbApiHook: - """ - Return the druid db api hook. - """ + """Return the druid db api hook.""" return DruidDbApiHook(druid_broker_conn_id=self.druid_broker_conn_id) def get_first(self, sql: str) -> Any: diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py index a1f14b6f81c86..99d4f4deae311 100644 --- a/airflow/providers/apache/druid/transfers/hive_to_druid.py +++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Hive to Druid. 
-""" +"""This module contains operator to move data from Hive to Druid.""" from typing import Any, Dict, List, Optional diff --git a/airflow/providers/apache/hdfs/hooks/hdfs.py b/airflow/providers/apache/hdfs/hooks/hdfs.py index e13a5c7cb5d85..48c3df888017c 100644 --- a/airflow/providers/apache/hdfs/hooks/hdfs.py +++ b/airflow/providers/apache/hdfs/hooks/hdfs.py @@ -62,9 +62,7 @@ def __init__( self.autoconfig = autoconfig def get_conn(self) -> Any: - """ - Returns a snakebite HDFSClient object. - """ + """Returns a snakebite HDFSClient object.""" # When using HAClient, proxy_user must be the same, so is ok to always # take the first. effective_user = self.proxy_user diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/airflow/providers/apache/hdfs/sensors/hdfs.py index d7235dcabd1d2..a011e5e38cae6 100644 --- a/airflow/providers/apache/hdfs/sensors/hdfs.py +++ b/airflow/providers/apache/hdfs/sensors/hdfs.py @@ -29,9 +29,7 @@ class HdfsSensor(BaseSensorOperator): - """ - Waits for a file or folder to land in HDFS - """ + """Waits for a file or folder to land in HDFS""" template_fields = ('filepath',) ui_color = settings.WEB_COLORS['LIGHTBLUE'] @@ -124,9 +122,7 @@ def poke(self, context: Dict[Any, Any]) -> bool: class HdfsRegexSensor(HdfsSensor): - """ - Waits for matching files by matching on regex - """ + """Waits for matching files by matching on regex""" def __init__(self, regex: Pattern[str], *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -153,9 +149,7 @@ def poke(self, context: Dict[Any, Any]) -> bool: class HdfsFolderSensor(HdfsSensor): - """ - Waits for a non-empty directory - """ + """Waits for a non-empty directory""" def __init__(self, be_empty: bool = False, *args: Any, **kwargs: Any): super().__init__(*args, **kwargs) diff --git a/airflow/providers/apache/hdfs/sensors/web_hdfs.py b/airflow/providers/apache/hdfs/sensors/web_hdfs.py index edc3c8b560553..923a4b9d52f2e 100644 --- a/airflow/providers/apache/hdfs/sensors/web_hdfs.py +++ b/airflow/providers/apache/hdfs/sensors/web_hdfs.py @@ -22,9 +22,7 @@ class WebHdfsSensor(BaseSensorOperator): - """ - Waits for a file or folder to land in HDFS - """ + """Waits for a file or folder to land in HDFS""" template_fields = ('filepath',) diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py index 3d7784fbfdcfd..912a219739600 100644 --- a/airflow/providers/apache/hive/hooks/hive.py +++ b/airflow/providers/apache/hive/hooks/hive.py @@ -108,9 +108,7 @@ def __init__( self.mapred_job_name = mapred_job_name def _get_proxy_user(self) -> str: - """ - This function set the proper proxy_user value in case the user overwrite the default. 
- """ + """This function set the proper proxy_user value in case the user overwrite the default.""" conn = self.conn proxy_user_value: str = conn.extra_dejson.get('proxy_user', "") @@ -123,9 +121,7 @@ def _get_proxy_user(self) -> str: return proxy_user_value # The default proxy user (undefined) def _prepare_cli_cmd(self) -> List[Any]: - """ - This function creates the command list from available information - """ + """This function creates the command list from available information""" conn = self.conn hive_bin = 'hive' cmd_extra = [] @@ -265,10 +261,7 @@ def run_cli( return stdout def test_hql(self, hql: Union[str, Text]) -> None: - """ - Test an hql statement using the hive cli and EXPLAIN - - """ + """Test an hql statement using the hive cli and EXPLAIN""" create, insert, other = [], [], [] for query in hql.split(';'): # naive query_original = query @@ -462,9 +455,7 @@ def load_file( self.run_cli(hql) def kill(self) -> None: - """ - Kill Hive cli command - """ + """Kill Hive cli command""" if hasattr(self, 'sp'): if self.sub_process.poll() is None: print("Killing the Hive job") @@ -496,9 +487,7 @@ def __setstate__(self, d: Dict[str, Any]) -> None: self.__dict__['metastore'] = self.get_metastore_client() def get_metastore_client(self) -> Any: - """ - Returns a Hive thrift client. - """ + """Returns a Hive thrift client.""" import hmsclient from thrift.protocol import TBinaryProtocol from thrift.transport import TSocket, TTransport @@ -616,17 +605,13 @@ def get_table(self, table_name: str, db: str = 'default') -> Any: return client.get_table(dbname=db, tbl_name=table_name) def get_tables(self, db: str, pattern: str = '*') -> Any: - """ - Get a metastore table object - """ + """Get a metastore table object""" with self.metastore as client: tables = client.get_tables(db_name=db, pattern=pattern) return client.get_table_objects_by_name(db, tables) def get_databases(self, pattern: str = '*') -> Any: - """ - Get a metastore table object - """ + """Get a metastore table object""" with self.metastore as client: return client.get_databases(pattern) @@ -827,9 +812,7 @@ class HiveServer2Hook(DbApiHook): supports_autocommit = False def get_conn(self, schema: Optional[str] = None) -> Any: - """ - Returns a Hive connection object. - """ + """Returns a Hive connection object.""" username: Optional[str] = None # pylint: disable=no-member db = self.get_connection(self.hiveserver2_conn_id) # type: ignore diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py index 1db5e9908a63c..598760fdd6783 100644 --- a/airflow/providers/apache/hive/operators/hive.py +++ b/airflow/providers/apache/hive/operators/hive.py @@ -120,9 +120,7 @@ def __init__( self.hook: Optional[HiveCliHook] = None def get_hook(self) -> HiveCliHook: - """ - Get Hive cli hook - """ + """Get Hive cli hook""" return HiveCliHook( hive_cli_conn_id=self.hive_cli_conn_id, run_as=self.run_as, @@ -172,9 +170,7 @@ def on_kill(self) -> None: self.hook.kill() def clear_airflow_vars(self) -> None: - """ - Reset airflow environment variables to prevent existing ones from impacting behavior. 
- """ + """Reset airflow environment variables to prevent existing ones from impacting behavior.""" blank_env_vars = { value['env_var_format']: '' for value in operator_helpers.AIRFLOW_VAR_NAME_FORMAT_MAPPING.values() } diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py index fceaa0f0c06a3..30beccab5548e 100644 --- a/airflow/providers/apache/hive/operators/hive_stats.py +++ b/airflow/providers/apache/hive/operators/hive_stats.py @@ -97,9 +97,7 @@ def __init__( self.dttm = '{{ execution_date.isoformat() }}' def get_default_exprs(self, col: str, col_type: str) -> Dict[Any, Any]: - """ - Get default expressions - """ + """Get default expressions""" if col in self.excluded_columns: return {} exp = {(col, 'non_null'): f"COUNT({col})"} diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py index 11d81ec837f7f..f8856c020b818 100644 --- a/airflow/providers/apache/hive/transfers/hive_to_mysql.py +++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Hive to MySQL. -""" +"""This module contains operator to move data from Hive to MySQL.""" from tempfile import NamedTemporaryFile from typing import Dict, Optional diff --git a/airflow/providers/apache/hive/transfers/hive_to_samba.py b/airflow/providers/apache/hive/transfers/hive_to_samba.py index dc93297475f6c..089ada938a652 100644 --- a/airflow/providers/apache/hive/transfers/hive_to_samba.py +++ b/airflow/providers/apache/hive/transfers/hive_to_samba.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Hive to Samba. -""" +"""This module contains operator to move data from Hive to Samba.""" from tempfile import NamedTemporaryFile diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 8f32ca28139f7..5079ff3e7dc41 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from MSSQL to Hive. -""" +"""This module contains operator to move data from MSSQL to Hive.""" from collections import OrderedDict from tempfile import NamedTemporaryFile @@ -104,9 +102,7 @@ def __init__( @classmethod def type_map(cls, mssql_type: int) -> str: - """ - Maps MsSQL type to Hive type. - """ + """Maps MsSQL type to Hive type.""" map_dict = { pymssql.BINARY.value: 'INT', # pylint: disable=c-extension-no-member pymssql.DECIMAL.value: 'FLOAT', # pylint: disable=c-extension-no-member diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py index 25aa802ddb080..a1a6dfa90f805 100644 --- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from MySQL to Druid. 
-""" +"""This module contains operator to move data from MySQL to Druid.""" from collections import OrderedDict from tempfile import NamedTemporaryFile @@ -118,9 +116,7 @@ def __init__( # pylint: disable=too-many-arguments @classmethod def type_map(cls, mysql_type: int) -> str: - """ - Maps MySQL type to Hive type. - """ + """Maps MySQL type to Hive type.""" types = MySQLdb.constants.FIELD_TYPE type_map = { types.BIT: 'INT', diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py index 844777e40d869..ae36ef718e03f 100644 --- a/airflow/providers/apache/hive/transfers/s3_to_hive.py +++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Hive to S3 bucket. -""" +"""This module contains operator to move data from Hive to S3 bucket.""" import bz2 import gzip diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py index 66c9790b0588c..aea700f00ec7a 100644 --- a/airflow/providers/apache/hive/transfers/vertica_to_hive.py +++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains operator to move data from Vertica to Hive. -""" +"""This module contains operator to move data from Vertica to Hive.""" from collections import OrderedDict from tempfile import NamedTemporaryFile diff --git a/airflow/providers/apache/livy/hooks/livy.py b/airflow/providers/apache/livy/hooks/livy.py index 1d614c07ec247..bba96078dff6d 100644 --- a/airflow/providers/apache/livy/hooks/livy.py +++ b/airflow/providers/apache/livy/hooks/livy.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains the Apache Livy hook. -""" +"""This module contains the Apache Livy hook.""" import json import re from enum import Enum @@ -31,9 +29,7 @@ class BatchState(Enum): - """ - Batch session states - """ + """Batch session states""" NOT_STARTED = 'not_started' STARTING = 'starting' diff --git a/airflow/providers/apache/livy/operators/livy.py b/airflow/providers/apache/livy/operators/livy.py index cbaaec2cffbd9..154f562c56c32 100644 --- a/airflow/providers/apache/livy/operators/livy.py +++ b/airflow/providers/apache/livy/operators/livy.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains the Apache Livy operator. -""" +"""This module contains the Apache Livy operator.""" from time import sleep from typing import Any, Dict, Optional, Sequence, Union @@ -165,8 +163,6 @@ def on_kill(self) -> None: self.kill() def kill(self) -> None: - """ - Delete the current batch session. - """ + """Delete the current batch session.""" if self._batch_id is not None: self.get_hook().delete_batch(self._batch_id) diff --git a/airflow/providers/apache/livy/sensors/livy.py b/airflow/providers/apache/livy/sensors/livy.py index ba29b7f940344..bd695eb6c5fba 100644 --- a/airflow/providers/apache/livy/sensors/livy.py +++ b/airflow/providers/apache/livy/sensors/livy.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains the Apache Livy sensor. 
-""" +"""This module contains the Apache Livy sensor.""" from typing import Any, Dict, Optional, Union from airflow.providers.apache.livy.hooks.livy import LivyHook diff --git a/airflow/providers/apache/pig/hooks/pig.py b/airflow/providers/apache/pig/hooks/pig.py index 4b08defe2874f..c8e39d8b73460 100644 --- a/airflow/providers/apache/pig/hooks/pig.py +++ b/airflow/providers/apache/pig/hooks/pig.py @@ -87,9 +87,7 @@ def run_cli(self, pig: str, pig_opts: Optional[str] = None, verbose: bool = True return stdout def kill(self) -> None: - """ - Kill Pig job - """ + """Kill Pig job""" if self.sub_process: if self.sub_process.poll() is None: self.log.info("Killing the Pig job") diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/airflow/providers/apache/pinot/hooks/pinot.py index 90de7d6b8462d..0ba8b8f224830 100644 --- a/airflow/providers/apache/pinot/hooks/pinot.py +++ b/airflow/providers/apache/pinot/hooks/pinot.py @@ -129,9 +129,7 @@ def create_segment( post_creation_verification: Optional[str] = None, retry: Optional[str] = None, ) -> Any: - """ - Create Pinot segment by run CreateSegment command - """ + """Create Pinot segment by run CreateSegment command""" cmd = ["CreateSegment"] if generator_config_file: @@ -252,18 +250,14 @@ def run_cli(self, cmd: List[str], verbose: bool = True) -> str: class PinotDbApiHook(DbApiHook): - """ - Connect to pinot db (https://github.com/apache/incubator-pinot) to issue pql - """ + """Connect to pinot db (https://github.com/apache/incubator-pinot) to issue pql""" conn_name_attr = 'pinot_broker_conn_id' default_conn_name = 'pinot_broker_default' supports_autocommit = False def get_conn(self) -> Any: - """ - Establish a connection to pinot broker through pinot dbapi. - """ + """Establish a connection to pinot broker through pinot dbapi.""" # pylint: disable=no-member conn = self.get_connection(self.pinot_broker_conn_id) # type: ignore # pylint: enable=no-member diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/airflow/providers/apache/spark/hooks/spark_jdbc.py index 2100b6432136c..a2b18ae4e30cd 100644 --- a/airflow/providers/apache/spark/hooks/spark_jdbc.py +++ b/airflow/providers/apache/spark/hooks/spark_jdbc.py @@ -241,9 +241,7 @@ def _build_jdbc_application_arguments(self, jdbc_conn: Dict[str, Any]) -> Any: return arguments def submit_jdbc_job(self) -> None: - """ - Submit Spark JDBC job - """ + """Submit Spark JDBC job""" self._application_args = self._build_jdbc_application_arguments(self._jdbc_connection) self.submit(application=os.path.dirname(os.path.abspath(__file__)) + "/spark_jdbc_script.py") diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py index 572ad5e96bd7b..0a0a720752fe1 100644 --- a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py +++ b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py @@ -69,9 +69,7 @@ def spark_write_to_jdbc( num_partitions: int, create_table_column_types: str, ) -> None: - """ - Transfer data from Spark to JDBC source - """ + """Transfer data from Spark to JDBC source""" writer = spark_session.table(metastore_table).write # first set common options writer = set_common_options(writer, url, jdbc_table, user, password, driver) @@ -106,9 +104,7 @@ def spark_read_from_jdbc( lower_bound: str, upper_bound: str, ) -> None: - """ - Transfer data from JDBC source to Spark - """ + """Transfer data from JDBC source to Spark""" # first set common options reader = set_common_options(spark_session.read, url, 
jdbc_table, user, password, driver) diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/airflow/providers/apache/spark/hooks/spark_sql.py index cceb2bc1aeb62..8f1cf045cefa2 100644 --- a/airflow/providers/apache/spark/hooks/spark_sql.py +++ b/airflow/providers/apache/spark/hooks/spark_sql.py @@ -168,9 +168,7 @@ def run_query(self, cmd: str = "", **kwargs: Any) -> None: ) def kill(self) -> None: - """ - Kill Spark job - """ + """Kill Spark job""" if self._sp and self._sp.poll() is None: self.log.info("Killing the Spark-Sql job") self._sp.kill() diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py index 15a8b9a0851f1..27ea7e1da946d 100644 --- a/airflow/providers/apache/spark/hooks/spark_submit.py +++ b/airflow/providers/apache/spark/hooks/spark_submit.py @@ -625,9 +625,7 @@ def _build_spark_driver_kill_command(self) -> List[str]: return connection_cmd def on_kill(self) -> None: - """ - Kill Spark submit command - """ + """Kill Spark submit command""" self.log.debug("Kill Command is being called") if self._should_track_driver_status: diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/airflow/providers/apache/spark/operators/spark_jdbc.py index a5f701a7542e2..63e42ca3fac12 100644 --- a/airflow/providers/apache/spark/operators/spark_jdbc.py +++ b/airflow/providers/apache/spark/operators/spark_jdbc.py @@ -185,9 +185,7 @@ def __init__( self._hook: Optional[SparkJDBCHook] = None def execute(self, context: Dict[str, Any]) -> None: - """ - Call the SparkSubmitHook to run the provided spark job - """ + """Call the SparkSubmitHook to run the provided spark job""" if self._hook is None: self._hook = self._get_hook() self._hook.submit_jdbc_job() diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py index 6d6c69d635528..69254092a2c9b 100644 --- a/airflow/providers/apache/spark/operators/spark_sql.py +++ b/airflow/providers/apache/spark/operators/spark_sql.py @@ -99,9 +99,7 @@ def __init__( self._hook: Optional[SparkSqlHook] = None def execute(self, context: Dict[str, Any]) -> None: - """ - Call the SparkSqlHook to run the provided sql query - """ + """Call the SparkSqlHook to run the provided sql query""" if self._hook is None: self._hook = self._get_hook() self._hook.run_query() diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/airflow/providers/apache/spark/operators/spark_submit.py index c3c6b65a1abb7..224b53585195d 100644 --- a/airflow/providers/apache/spark/operators/spark_submit.py +++ b/airflow/providers/apache/spark/operators/spark_submit.py @@ -177,9 +177,7 @@ def __init__( self._conn_id = conn_id def execute(self, context: Dict[str, Any]) -> None: - """ - Call the SparkSubmitHook to run the provided spark job - """ + """Call the SparkSubmitHook to run the provided spark job""" if self._hook is None: self._hook = self._get_hook() self._hook.submit(self._application) diff --git a/airflow/providers/apache/sqoop/hooks/sqoop.py b/airflow/providers/apache/sqoop/hooks/sqoop.py index 7dfa665bb9b56..22f743f92e238 100644 --- a/airflow/providers/apache/sqoop/hooks/sqoop.py +++ b/airflow/providers/apache/sqoop/hooks/sqoop.py @@ -17,9 +17,7 @@ # under the License. 
# -""" -This module contains a sqoop 1.x hook -""" +"""This module contains a sqoop 1.x hook""" import subprocess from copy import deepcopy from typing import Any, Dict, List, Optional @@ -85,9 +83,7 @@ def get_conn(self) -> Any: return self.conn def cmd_mask_password(self, cmd_orig: List[str]) -> List[str]: - """ - Mask command password for safety - """ + """Mask command password for safety""" cmd = deepcopy(cmd_orig) try: password_index = cmd.index('--password') diff --git a/airflow/providers/apache/sqoop/operators/sqoop.py b/airflow/providers/apache/sqoop/operators/sqoop.py index 514d83fb7fa07..b35797a6c2f1c 100644 --- a/airflow/providers/apache/sqoop/operators/sqoop.py +++ b/airflow/providers/apache/sqoop/operators/sqoop.py @@ -16,10 +16,7 @@ # specific language governing permissions and limitations # under the License. # - -""" -This module contains a sqoop 1 operator -""" +"""This module contains a sqoop 1 operator""" import os import signal from typing import Any, Dict, Optional @@ -187,9 +184,7 @@ def __init__( self.hook: Optional[SqoopHook] = None def execute(self, context: Dict[str, Any]) -> None: - """ - Execute sqoop job - """ + """Execute sqoop job""" if self.hook is None: self.hook = self._get_hook() diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py index 3af1a2edb262b..95f7962569863 100644 --- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py @@ -62,9 +62,7 @@ def __init__( self.client_configuration = client_configuration def get_conn(self) -> Any: - """ - Returns kubernetes api session for use with requests - """ + """Returns kubernetes api session for use with requests""" connection = self.get_connection(self.conn_id) extras = connection.extra_dejson in_cluster = extras.get("extra__kubernetes__in_cluster") @@ -169,9 +167,7 @@ def get_custom_object( raise AirflowException("Exception when calling -> get_custom_object: %s\n" % e) def get_namespace(self) -> str: - """ - Returns the namespace that defined in the connection - """ + """Returns the namespace that defined in the connection""" connection = self.get_connection(self.conn_id) extras = connection.extra_dejson namespace = extras.get("extra__kubernetes__namespace", "default") diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 1b5950d1101cd..e3703188cfbaf 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -452,9 +452,7 @@ def create_new_pod_for_operator(self, labels, launcher) -> Tuple[State, k8s.V1Po return final_state, self.pod, result def patch_already_checked(self, pod: k8s.V1Pod): - """ - Add an "already tried annotation to ensure we only retry once - """ + """Add an "already tried annotation to ensure we only retry once""" pod.metadata.labels["already_checked"] = "True" body = PodGenerator.serialize_pod(pod) self.client.patch_namespaced_pod(pod.metadata.name, pod.metadata.namespace, body) diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py index 0b6efefb077c8..f3b4bef4141fd 100644 --- a/airflow/providers/databricks/hooks/databricks.py +++ b/airflow/providers/databricks/hooks/databricks.py @@ -45,9 +45,7 @@ class RunState: - """ - Utility class for the run state concept of Databricks runs. 
- """ + """Utility class for the run state concept of Databricks runs.""" def __init__(self, life_cycle_state: str, result_state: str, state_message: str) -> None: self.life_cycle_state = life_cycle_state diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index 1462a9de5fb8f..bbf9b3dcb5bb7 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Databricks operators. -""" +"""This module contains Databricks operators.""" import time from typing import Union, Optional, Any, Dict, List @@ -268,9 +266,7 @@ def __init__( do_xcom_push: bool = False, **kwargs, ) -> None: - """ - Creates a new ``DatabricksSubmitRunOperator``. - """ + """Creates a new ``DatabricksSubmitRunOperator``.""" super().__init__(**kwargs) self.json = json or {} self.databricks_conn_id = databricks_conn_id @@ -476,9 +472,7 @@ def __init__( do_xcom_push: bool = False, **kwargs, ) -> None: - """ - Creates a new ``DatabricksRunNowOperator``. - """ + """Creates a new ``DatabricksRunNowOperator``.""" super().__init__(**kwargs) self.json = json or {} self.databricks_conn_id = databricks_conn_id diff --git a/airflow/providers/datadog/hooks/datadog.py b/airflow/providers/datadog/hooks/datadog.py index e81118207321e..6267f6e643009 100644 --- a/airflow/providers/datadog/hooks/datadog.py +++ b/airflow/providers/datadog/hooks/datadog.py @@ -57,9 +57,7 @@ def __init__(self, datadog_conn_id: str = 'datadog_default') -> None: initialize(api_key=self.api_key, app_key=self.app_key) def validate_response(self, response: Dict[str, Any]) -> None: - """ - Validate Datadog response - """ + """Validate Datadog response""" if response['status'] != 'ok': self.log.error("Datadog returned: %s", response) raise AirflowException("Error status received from Datadog") diff --git a/airflow/providers/dingding/hooks/dingding.py b/airflow/providers/dingding/hooks/dingding.py index 5caef8d796b60..b1b71988448fd 100644 --- a/airflow/providers/dingding/hooks/dingding.py +++ b/airflow/providers/dingding/hooks/dingding.py @@ -65,9 +65,7 @@ def __init__( self.at_all = at_all def _get_endpoint(self) -> str: - """ - Get Dingding endpoint for sending message. 
- """ + """Get Dingding endpoint for sending message.""" conn = self.get_connection(self.http_conn_id) token = conn.password if not token: @@ -108,9 +106,7 @@ def get_conn(self, headers: Optional[dict] = None) -> Session: return session def send(self) -> None: - """ - Send Dingding message - """ + """Send Dingding message""" support_type = ['text', 'link', 'markdown', 'actionCard', 'feedCard'] if self.message_type not in support_type: raise ValueError( diff --git a/airflow/providers/discord/hooks/discord_webhook.py b/airflow/providers/discord/hooks/discord_webhook.py index b88efd73e2de8..60457b86cfaeb 100644 --- a/airflow/providers/discord/hooks/discord_webhook.py +++ b/airflow/providers/discord/hooks/discord_webhook.py @@ -127,9 +127,7 @@ def _build_discord_payload(self) -> str: return json.dumps(payload) def execute(self) -> None: - """ - Execute the Discord webhook call - """ + """Execute the Discord webhook call""" proxies = {} if self.proxy: # we only need https proxy for Discord diff --git a/airflow/providers/discord/operators/discord_webhook.py b/airflow/providers/discord/operators/discord_webhook.py index ea8758156e6e0..2c1dc64174ad8 100644 --- a/airflow/providers/discord/operators/discord_webhook.py +++ b/airflow/providers/discord/operators/discord_webhook.py @@ -84,9 +84,7 @@ def __init__( self.hook: Optional[DiscordWebhookHook] = None def execute(self, context: Dict) -> None: - """ - Call the DiscordWebhookHook to post message - """ + """Call the DiscordWebhookHook to post message""" self.hook = DiscordWebhookHook( self.http_conn_id, self.webhook_endpoint, diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index 4a292e07ed7bb..be544119dc3e2 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Implements Docker operator -""" +"""Implements Docker operator""" import ast from tempfile import TemporaryDirectory from typing import Dict, Iterable, List, Optional, Union @@ -222,9 +220,7 @@ def get_hook(self) -> DockerHook: ) def _run_image(self) -> Optional[str]: - """ - Run a Docker container with the provided image - """ + """Run a Docker container with the provided image""" self.log.info('Starting docker container from image %s', self.image) with TemporaryDirectory(prefix='airflowtmp', dir=self.host_tmp_dir) as host_tmp_dir: diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/airflow/providers/elasticsearch/hooks/elasticsearch.py index b322375664b7b..7fd7b7b13fd41 100644 --- a/airflow/providers/elasticsearch/hooks/elasticsearch.py +++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py @@ -36,9 +36,7 @@ def __init__(self, schema: str = "http", connection: Optional[AirflowConnection] self.connection = connection def get_conn(self) -> ESConnection: - """ - Returns a elasticsearch connection object - """ + """Returns a elasticsearch connection object""" conn_id = getattr(self, self.conn_name_attr) conn = self.connection or self.get_connection(conn_id) diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py index 1b095a24189a7..0aee7bbc3599b 100644 --- a/airflow/providers/facebook/ads/hooks/ads.py +++ b/airflow/providers/facebook/ads/hooks/ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Facebook Ads Reporting hooks -""" +"""This module contains Facebook Ads Reporting hooks""" import time from enum import Enum from typing import Any, Dict, List @@ -33,9 +31,7 @@ class JobStatus(Enum): - """ - Available options for facebook async task status - """ + """Available options for facebook async task status""" COMPLETED = 'Job Completed' STARTED = 'Job Started' diff --git a/airflow/providers/ftp/hooks/ftp.py b/airflow/providers/ftp/hooks/ftp.py index 23d0c39a69626..87ae53a7ca0c7 100644 --- a/airflow/providers/ftp/hooks/ftp.py +++ b/airflow/providers/ftp/hooks/ftp.py @@ -47,9 +47,7 @@ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: self.close_conn() def get_conn(self) -> ftplib.FTP: - """ - Returns a FTP connection object - """ + """Returns an FTP connection object""" if self.conn is None: params = self.get_connection(self.ftp_conn_id) pasv = params.extra_dejson.get("passive", True) @@ -261,14 +259,10 @@ def get_size(self, path: str) -> Optional[int]: class FTPSHook(FTPHook): - """ - Interact with FTPS. - """ + """Interact with FTPS.""" def get_conn(self) -> ftplib.FTP: - """ - Returns a FTPS connection object. - """ + """Returns an FTPS connection object.""" if self.conn is None: params = self.get_connection(self.ftp_conn_id) pasv = params.extra_dejson.get("passive", True) diff --git a/airflow/providers/google/ads/hooks/ads.py b/airflow/providers/google/ads/hooks/ads.py index a1323f5cd4071..5680cb1516bb1 100644 --- a/airflow/providers/google/ads/hooks/ads.py +++ b/airflow/providers/google/ads/hooks/ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Ad hook. -""" +"""This module contains Google Ads hook.""" from tempfile import NamedTemporaryFile from typing import IO, Any, Dict, Generator, List @@ -88,9 +86,7 @@ def __init__( @cached_property def _get_service(self) -> Resource: - """ - Connects and authenticates with the Google Ads API using a service account - """ + """Connects and authenticates with the Google Ads API using a service account""" with NamedTemporaryFile("w", suffix=".json") as secrets_temp: self._get_config() self._update_config_with_secret(secrets_temp) @@ -103,9 +99,7 @@ def _get_service(self) -> Resource: @cached_property def _get_customer_service(self) -> Resource: - """ - Connects and authenticates with the Google Ads API using a service account - """ + """Connects and authenticates with the Google Ads API using a service account""" with NamedTemporaryFile("w", suffix=".json") as secrets_temp: self._get_config() self._update_config_with_secret(secrets_temp) @@ -118,7 +112,8 @@ def _get_customer_service(self) -> Resource: def _get_config(self) -> None: """ Gets google ads connection from meta db and sets google_ads_config attribute with returned config file + Gets google ads connection from meta db and sets google_ads_config attribute with returned config + file """ conn = self.get_connection(self.google_ads_conn_id) if "google_ads_client" not in conn.extra_dejson: diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/operators/ads.py index f3d72bbf51dde..acdfb3b1ac38f 100644 --- a/airflow/providers/google/ads/operators/ads.py +++ b/airflow/providers/google/ads/operators/ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied.
See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Ad to GCS operators. -""" +"""This module contains Google Ads to GCS operators.""" import csv from tempfile import NamedTemporaryFile from typing import Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py index 7b14b8e3cbca7..7069aef3e3fdb 100644 --- a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py +++ b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py @@ -60,9 +60,7 @@ def is_valid_secret_name(secret_name: str) -> bool: @cached_property def client(self) -> SecretManagerServiceClient: - """ - Create an authenticated KMS client - """ + """Create an authenticated Secret Manager client""" _client = SecretManagerServiceClient( credentials=self.credentials, client_info=ClientInfo(client_library_version='airflow_v' + version) ) diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py index 4c38fe7691068..78ec4fbde54e0 100644 --- a/airflow/providers/google/cloud/hooks/automl.py +++ b/airflow/providers/google/cloud/hooks/automl.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a Google AutoML hook. -""" +"""This module contains a Google AutoML hook.""" from typing import Dict, List, Optional, Sequence, Tuple, Union from cached_property import cached_property @@ -65,9 +63,7 @@ def __init__( @staticmethod def extract_object_id(obj: Dict) -> str: - """ - Returns unique id of the object. - """ + """Returns unique id of the object.""" return obj["name"].rpartition("/")[-1] def get_conn(self) -> AutoMlClient: diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index cd5a551563458..94681d6cdbb2c 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -65,9 +65,7 @@ # pylint: disable=too-many-public-methods class BigQueryHook(GoogleBaseHook, DbApiHook): - """ - Interact with BigQuery. This hook uses the Google Cloud connection. - """ + """Interact with BigQuery. This hook uses the Google Cloud connection.""" conn_name_attr = 'gcp_conn_id' # type: str @@ -102,9 +100,7 @@ def __init__( self.api_resource_configs = api_resource_configs if api_resource_configs else {} # type Dict def get_conn(self) -> "BigQueryConnection": - """ - Returns a BigQuery PEP 249 connection object. - """ + """Returns a BigQuery PEP 249 connection object.""" service = self.get_service() return BigQueryConnection( service=service, @@ -116,9 +112,7 @@ def get_conn(self) -> "BigQueryConnection": ) def get_service(self) -> Resource: - """ - Returns a BigQuery service object. - """ + """Returns a BigQuery service object.""" warnings.warn( "This method will be deprecated. Please use `BigQueryHook.get_client` method", DeprecationWarning ) @@ -1364,9 +1358,7 @@ def poll_job_complete( return job.done(retry=retry) def cancel_query(self) -> None: - """ - Cancel all started queries that have not yet completed - """ + """Cancel all started queries that have not yet completed""" warnings.warn( "This method is deprecated.
Please use `BigQueryHook.cancel_job`.", DeprecationWarning, diff --git a/airflow/providers/google/cloud/hooks/bigquery_dts.py b/airflow/providers/google/cloud/hooks/bigquery_dts.py index 5bcb6096d9ee2..2d8d12bc039bd 100644 --- a/airflow/providers/google/cloud/hooks/bigquery_dts.py +++ b/airflow/providers/google/cloud/hooks/bigquery_dts.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a BigQuery Hook. -""" +"""This module contains a BigQuery Hook.""" from copy import copy from typing import Optional, Sequence, Tuple, Union @@ -36,9 +34,7 @@ def get_object_id(obj: dict) -> str: - """ - Returns unique id of the object. - """ + """Returns unique id of the object.""" return obj["name"].rpartition("/")[-1] diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/airflow/providers/google/cloud/hooks/bigtable.py index c6eb84fa4c823..c5a2fa1004e24 100644 --- a/airflow/providers/google/cloud/hooks/bigtable.py +++ b/airflow/providers/google/cloud/hooks/bigtable.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Bigtable Hook. -""" +"""This module contains a Google Cloud Bigtable Hook.""" import enum import warnings from typing import Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/airflow/providers/google/cloud/hooks/cloud_memorystore.py index 0317e92894df5..995db95363168 100644 --- a/airflow/providers/google/cloud/hooks/cloud_memorystore.py +++ b/airflow/providers/google/cloud/hooks/cloud_memorystore.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Hooks for Cloud Memorystore service -""" +"""Hooks for Cloud Memorystore service""" from typing import Dict, Optional, Sequence, Tuple, Union from google.api_core.exceptions import NotFound @@ -70,10 +68,7 @@ def __init__( self._client: Optional[CloudRedisClient] = None def get_conn(self): - """ - Retrieves client library object that allow access to Cloud Memorystore service. - - """ + """Retrieves client library object that allows access to Cloud Memorystore service.""" if not self._client: self._client = CloudRedisClient(credentials=self._get_credentials()) return self._client diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py index 57a83e2994925..f26b76285a856 100644 --- a/airflow/providers/google/cloud/hooks/cloud_sql.py +++ b/airflow/providers/google/cloud/hooks/cloud_sql.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=too-many-lines -""" -This module contains a Google Cloud SQL Hook. -""" +"""This module contains a Google Cloud SQL Hook.""" import errno import json @@ -62,9 +60,7 @@ class CloudSqlOperationStatus: - """ - Helper class with operation statuses. - """ + """Helper class with operation statuses.""" PENDING = "PENDING" RUNNING = "RUNNING" @@ -609,9 +605,7 @@ def stop_proxy(self) -> None: os.remove(self.credentials_path) def get_proxy_version(self) -> Optional[str]: - """ - Returns version of the Cloud SQL Proxy.
- """ + """Returns version of the Cloud SQL Proxy.""" self._download_sql_proxy_if_needed() command_to_run = [self.sql_proxy_path] command_to_run.extend(['--version']) @@ -958,9 +952,7 @@ def get_database_hook(self, connection: Connection) -> Union[PostgresHook, MySql return self.db_hook def cleanup_database_hook(self) -> None: - """ - Clean up database hook after it was used. - """ + """Clean up database hook after it was used.""" if self.database_type == 'postgres': if not self.db_hook: raise ValueError("The db_hook should be set") @@ -972,17 +964,13 @@ def cleanup_database_hook(self) -> None: self.log.info(output) def reserve_free_tcp_port(self) -> None: - """ - Reserve free TCP port to be used by Cloud SQL Proxy - """ + """Reserve free TCP port to be used by Cloud SQL Proxy""" self.reserved_tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.reserved_tcp_socket.bind(('127.0.0.1', 0)) self.sql_proxy_tcp_port = self.reserved_tcp_socket.getsockname()[1] def free_reserved_port(self) -> None: - """ - Free TCP port. Makes it immediately ready to be used by Cloud SQL Proxy. - """ + """Free TCP port. Makes it immediately ready to be used by Cloud SQL Proxy.""" if self.reserved_tcp_socket: self.reserved_tcp_socket.close() self.reserved_tcp_socket = None diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py index 604432ada6d96..0e30cb0399d3f 100644 --- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Storage Transfer Service Hook. -""" +"""This module contains a Google Storage Transfer Service Hook.""" import json import logging @@ -40,9 +38,7 @@ class GcpTransferJobsStatus: - """ - Class with Google Cloud Transfer jobs statuses. - """ + """Class with Google Cloud Transfer jobs statuses.""" ENABLED = "ENABLED" DISABLED = "DISABLED" @@ -50,9 +46,7 @@ class GcpTransferJobsStatus: class GcpTransferOperationStatus: - """ - Class with Google Cloud Transfer operations statuses. - """ + """Class with Google Cloud Transfer operations statuses.""" IN_PROGRESS = "IN_PROGRESS" PAUSED = "PAUSED" diff --git a/airflow/providers/google/cloud/hooks/compute.py b/airflow/providers/google/cloud/hooks/compute.py index 8beee06f2bb36..960bbdaf9e721 100644 --- a/airflow/providers/google/cloud/hooks/compute.py +++ b/airflow/providers/google/cloud/hooks/compute.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Compute Engine Hook. -""" +"""This module contains a Google Compute Engine Hook.""" import time from typing import Any, Optional, Sequence, Union @@ -32,9 +30,7 @@ class GceOperationStatus: - """ - Class with GCE operations statuses. 
- """ + """Class with GCE operations statuses.""" PENDING = "PENDING" RUNNING = "RUNNING" diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/airflow/providers/google/cloud/hooks/datacatalog.py index af1b5e10e19eb..3bdfa50c521f4 100644 --- a/airflow/providers/google/cloud/hooks/datacatalog.py +++ b/airflow/providers/google/cloud/hooks/datacatalog.py @@ -68,9 +68,7 @@ def __init__( self._client: Optional[DataCatalogClient] = None def get_conn(self) -> DataCatalogClient: - """ - Retrieves client library object that allow access to Cloud Data Catalog service. - """ + """Retrieves client library object that allow access to Cloud Data Catalog service.""" if not self._client: self._client = DataCatalogClient( credentials=self._get_credentials(), client_info=self.client_info diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py index bcfaf68a760be..8953a39b63b0f 100644 --- a/airflow/providers/google/cloud/hooks/dataflow.py +++ b/airflow/providers/google/cloud/hooks/dataflow.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Dataflow Hook. -""" +"""This module contains a Google Dataflow Hook.""" import functools import json import re @@ -125,9 +123,7 @@ class DataflowJobStatus: class DataflowJobType: - """ - Helper class with Dataflow job types. - """ + """Helper class with Dataflow job types.""" JOB_TYPE_UNKNOWN = "JOB_TYPE_UNKNOWN" JOB_TYPE_BATCH = "JOB_TYPE_BATCH" @@ -285,9 +281,7 @@ def _check_dataflow_job_state(self, job) -> bool: ) def wait_for_done(self) -> None: - """ - Helper method to wait for result of submitted job. - """ + """Helper method to wait for result of submitted job.""" self.log.info("Start waiting for done.") self._refresh_jobs() while self._jobs and not all(self._check_dataflow_job_state(job) for job in self._jobs): @@ -310,9 +304,7 @@ def get_jobs(self, refresh=False) -> List[dict]: return self._jobs def cancel(self) -> None: - """ - Cancels current job - """ + """Cancels current job""" jobs = self.get_jobs() job_ids = [job['id'] for job in jobs if job['currentState'] not in DataflowJobStatus.TERMINAL_STATES] if job_ids: @@ -444,9 +436,7 @@ def __init__( ) def get_conn(self) -> build: - """ - Returns a Google Cloud Dataflow service object. - """ + """Returns a Google Cloud Dataflow service object.""" http_authorized = self._authorize() return build('dataflow', 'v1b3', http=http_authorized, cache_discovery=False) diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/airflow/providers/google/cloud/hooks/datafusion.py index 1beeba45d1717..8e686e8499fb2 100644 --- a/airflow/providers/google/cloud/hooks/datafusion.py +++ b/airflow/providers/google/cloud/hooks/datafusion.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains Google DataFusion hook. -""" +"""This module contains Google DataFusion hook.""" import json import os from time import monotonic, sleep @@ -53,9 +51,7 @@ class PipelineStates: class DataFusionHook(GoogleBaseHook): - """ - Hook for Google DataFusion. - """ + """Hook for Google DataFusion.""" _conn = None # type: Optional[Resource] @@ -74,9 +70,7 @@ def __init__( self.api_version = api_version def wait_for_operation(self, operation: Dict[str, Any]) -> Dict[str, Any]: - """ - Waits for long-lasting operation to complete. 
- """ + """Waits for long-lasting operation to complete.""" for time_to_wait in exponential_sleep_generator(initial=10, maximum=120): sleep(time_to_wait) operation = ( @@ -162,9 +156,7 @@ def _cdap_request( return response def get_conn(self) -> Resource: - """ - Retrieves connection to DataFusion. - """ + """Retrieves connection to DataFusion.""" if not self._conn: http_authorized = self._authorize() self._conn = build( diff --git a/airflow/providers/google/cloud/hooks/dataprep.py b/airflow/providers/google/cloud/hooks/dataprep.py index 41f27a7b57ddb..d5c8ab089a5c2 100644 --- a/airflow/providers/google/cloud/hooks/dataprep.py +++ b/airflow/providers/google/cloud/hooks/dataprep.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Dataprep hook. -""" +"""This module contains Google Dataprep hook.""" import json import os from typing import Any, Dict diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index 079ddb2dac72b..9588cea8a9d9c 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ b/airflow/providers/google/cloud/hooks/dataproc.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a Google Cloud Dataproc hook. -""" +"""This module contains a Google Cloud Dataproc hook.""" import time import uuid @@ -47,9 +45,7 @@ class DataProcJobBuilder: - """ - A helper class for building Dataproc job. - """ + """A helper class for building Dataproc job.""" def __init__( self, @@ -214,9 +210,7 @@ class DataprocHook(GoogleBaseHook): """ def get_cluster_client(self, location: Optional[str] = None) -> ClusterControllerClient: - """ - Returns ClusterControllerClient. - """ + """Returns ClusterControllerClient.""" client_options = ( {'api_endpoint': '{}-dataproc.googleapis.com:443'.format(location)} if location else None ) @@ -227,17 +221,13 @@ def get_cluster_client(self, location: Optional[str] = None) -> ClusterControlle @cached_property def get_template_client(self) -> WorkflowTemplateServiceClient: - """ - Returns WorkflowTemplateServiceClient. - """ + """Returns WorkflowTemplateServiceClient.""" return WorkflowTemplateServiceClient( credentials=self._get_credentials(), client_info=self.client_info ) def get_job_client(self, location: Optional[str] = None) -> JobControllerClient: - """ - Returns JobControllerClient. - """ + """Returns JobControllerClient.""" client_options = ( {'api_endpoint': '{}-dataproc.googleapis.com:443'.format(location)} if location else None ) diff --git a/airflow/providers/google/cloud/hooks/datastore.py b/airflow/providers/google/cloud/hooks/datastore.py index f397762b80092..c8ca3de1cb7d9 100644 --- a/airflow/providers/google/cloud/hooks/datastore.py +++ b/airflow/providers/google/cloud/hooks/datastore.py @@ -16,9 +16,8 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Google Datastore hook. -""" +"""This module contains Google Datastore hook.""" + import time import warnings diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py index 73519762bb046..a4efe099950dc 100644 --- a/airflow/providers/google/cloud/hooks/functions.py +++ b/airflow/providers/google/cloud/hooks/functions.py @@ -15,9 +15,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Functions Hook. -""" +"""This module contains a Google Cloud Functions Hook.""" import time from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index 68ac78c13ec8a..8740a44b5bab2 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a Google Cloud Storage hook. -""" +"""This module contains a Google Cloud Storage hook.""" import functools import gzip as gz import os @@ -140,9 +138,7 @@ def __init__( ) def get_conn(self) -> storage.Client: - """ - Returns a Google Cloud Storage service object. - """ + """Returns a Google Cloud Storage service object.""" if not self._conn: self._conn = storage.Client( credentials=self._get_credentials(), client_info=self.client_info, project=self.project_id diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py index 00576439b9a48..fe75e6dd84db8 100644 --- a/airflow/providers/google/cloud/hooks/kms.py +++ b/airflow/providers/google/cloud/hooks/kms.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a Google Cloud KMS hook. -""" +"""This module contains a Google Cloud KMS hook.""" import base64 diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py index 2d4475b913818..930c1cd87c1bf 100644 --- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py +++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains a Google Kubernetes Engine Hook. -""" +"""This module contains a Google Kubernetes Engine Hook.""" import time import warnings diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/airflow/providers/google/cloud/hooks/mlengine.py index 758dae0962a05..4aa18bfab6c42 100644 --- a/airflow/providers/google/cloud/hooks/mlengine.py +++ b/airflow/providers/google/cloud/hooks/mlengine.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google ML Engine Hook. -""" +"""This module contains a Google ML Engine Hook.""" import logging import random import time diff --git a/airflow/providers/google/cloud/hooks/natural_language.py b/airflow/providers/google/cloud/hooks/natural_language.py index 8fc0090d179ba..505f6cf9d2804 100644 --- a/airflow/providers/google/cloud/hooks/natural_language.py +++ b/airflow/providers/google/cloud/hooks/natural_language.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Natural Language Hook.
-""" +"""This module contains a Google Cloud Natural Language Hook.""" from typing import Optional, Sequence, Tuple, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index 8206715f075d9..3a62ce73e6085 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Pub/Sub Hook. -""" +"""This module contains a Google Pub/Sub Hook.""" import warnings from base64 import b64decode from typing import Dict, List, Optional, Sequence, Tuple, Union @@ -44,9 +42,7 @@ class PubSubException(Exception): - """ - Alias for Exception. - """ + """Alias for Exception.""" class PubSubHook(GoogleBaseHook): diff --git a/airflow/providers/google/cloud/hooks/spanner.py b/airflow/providers/google/cloud/hooks/spanner.py index 9a4d0b89d24ed..6344f3769d30a 100644 --- a/airflow/providers/google/cloud/hooks/spanner.py +++ b/airflow/providers/google/cloud/hooks/spanner.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Spanner Hook. -""" +"""This module contains a Google Cloud Spanner Hook.""" from typing import Callable, List, Optional, Sequence, Union from google.api_core.exceptions import AlreadyExists, GoogleAPICallError diff --git a/airflow/providers/google/cloud/hooks/speech_to_text.py b/airflow/providers/google/cloud/hooks/speech_to_text.py index 06f25f36f34d1..fb74a0703087c 100644 --- a/airflow/providers/google/cloud/hooks/speech_to_text.py +++ b/airflow/providers/google/cloud/hooks/speech_to_text.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Speech Hook. -""" +"""This module contains a Google Cloud Speech Hook.""" from typing import Dict, Optional, Sequence, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/airflow/providers/google/cloud/hooks/stackdriver.py index 2588d94dee9a5..06a62e228a9b4 100644 --- a/airflow/providers/google/cloud/hooks/stackdriver.py +++ b/airflow/providers/google/cloud/hooks/stackdriver.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Stackdriver operators. -""" +"""This module contains Google Cloud Stackdriver operators.""" import json from typing import Any, Optional, Sequence, Union @@ -34,9 +32,7 @@ class StackdriverHook(GoogleBaseHook): - """ - Stackdriver Hook for connecting with Google Cloud Stackdriver - """ + """Stackdriver Hook for connecting with Google Cloud Stackdriver""" def __init__( self, diff --git a/airflow/providers/google/cloud/hooks/text_to_speech.py b/airflow/providers/google/cloud/hooks/text_to_speech.py index 9dcef15e1d5e8..ca5904e54eb1e 100644 --- a/airflow/providers/google/cloud/hooks/text_to_speech.py +++ b/airflow/providers/google/cloud/hooks/text_to_speech.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-""" -This module contains a Google Cloud Text to Speech Hook. -""" +"""This module contains a Google Cloud Text to Speech Hook.""" from typing import Dict, Optional, Sequence, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/hooks/translate.py b/airflow/providers/google/cloud/hooks/translate.py index a2d3abfa80b25..d6c1f6c9f99c7 100644 --- a/airflow/providers/google/cloud/hooks/translate.py +++ b/airflow/providers/google/cloud/hooks/translate.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Translate Hook. -""" +"""This module contains a Google Cloud Translate Hook.""" from typing import List, Optional, Sequence, Union from google.cloud.translate_v2 import Client diff --git a/airflow/providers/google/cloud/hooks/video_intelligence.py b/airflow/providers/google/cloud/hooks/video_intelligence.py index dcfd4425aea82..96c1c79c5f8ce 100644 --- a/airflow/providers/google/cloud/hooks/video_intelligence.py +++ b/airflow/providers/google/cloud/hooks/video_intelligence.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Video Intelligence Hook. -""" +"""This module contains a Google Cloud Video Intelligence Hook.""" from typing import Dict, List, Optional, Sequence, Tuple, Union from google.api_core.operation import Operation diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index 51657a60b2634..b0d5a132de596 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Vision Hook. -""" +"""This module contains a Google Cloud Vision Hook.""" from copy import deepcopy from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -51,9 +49,7 @@ class NameDeterminer: - """ - Helper class to determine entity name. - """ + """Helper class to determine entity name.""" def __init__(self, label: str, id_label: str, get_path: Callable[[str, str, str], str]) -> None: self.label = label diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index f4a8a0bbb0313..eac4e22299fcf 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -110,9 +110,7 @@ def set_context(self, ti): self.upload_on_close = not ti.raw def close(self): - """ - Close and upload local log file to remote storage GCS. - """ + """Close and upload local log file to remote storage GCS.""" # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index 096ad68e6b1eb..45c6269734df5 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -14,9 +14,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. -""" -Handler that integrates with Stackdriver -""" +"""Handler that integrates with Stackdriver""" import logging from typing import Collection, Dict, List, Optional, Tuple, Type from urllib.parse import urlencode diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py index 435e1fd6f60ac..a1823cdd754a8 100644 --- a/airflow/providers/google/cloud/operators/automl.py +++ b/airflow/providers/google/cloud/operators/automl.py @@ -17,9 +17,7 @@ # under the License. # # pylint: disable=too-many-lines -""" -This module contains Google AutoML operators. -""" +"""This module contains Google AutoML operators.""" import ast from typing import Dict, List, Optional, Sequence, Tuple, Union diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index ad60588b373cb..72dbbce445f9b 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -17,9 +17,7 @@ # under the License. # pylint: disable=too-many-lines -""" -This module contains Google BigQuery operators. -""" +"""This module contains Google BigQuery operators.""" import enum import hashlib import json @@ -58,9 +56,7 @@ class BigQueryUIColors(enum.Enum): class BigQueryConsoleLink(BaseOperatorLink): - """ - Helper class for constructing BigQuery link. - """ + """Helper class for constructing BigQuery link.""" name = 'BigQuery Console' @@ -72,9 +68,7 @@ def get_link(self, operator, dttm): @attr.s(auto_attribs=True) class BigQueryConsoleIndexableLink(BaseOperatorLink): - """ - Helper class for constructing BigQuery link. - """ + """Helper class for constructing BigQuery link.""" index: int = attr.ib() @@ -607,9 +601,7 @@ class BigQueryExecuteQueryOperator(BaseOperator): @property def operator_extra_links(self): - """ - Return operator extra links - """ + """Return operator extra links""" if isinstance(self.sql, str): return (BigQueryConsoleLink(),) return (BigQueryConsoleIndexableLink(i) for i, _ in enumerate(self.sql)) diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py index 1c401fc39fc8f..e941bd4c4be53 100644 --- a/airflow/providers/google/cloud/operators/bigquery_dts.py +++ b/airflow/providers/google/cloud/operators/bigquery_dts.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google BigQuery Data Transfer Service operators. -""" +"""This module contains Google BigQuery Data Transfer Service operators.""" from typing import Optional, Sequence, Tuple, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/airflow/providers/google/cloud/operators/bigtable.py index ab9cf2eacac86..ad375c29b7492 100644 --- a/airflow/providers/google/cloud/operators/bigtable.py +++ b/airflow/providers/google/cloud/operators/bigtable.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Bigtable operators. 
-""" +"""This module contains Google Cloud Bigtable operators.""" import enum from typing import Dict, Iterable, List, Optional, Sequence, Union @@ -32,9 +30,7 @@ class BigtableValidationMixin: - """ - Common class for Cloud Bigtable operators for validating required fields. - """ + """Common class for Cloud Bigtable operators for validating required fields.""" REQUIRED_ATTRIBUTES = [] # type: Iterable[str] diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/airflow/providers/google/cloud/operators/cloud_sql.py index 162264d1f358d..f3eeaf23062fa 100644 --- a/airflow/providers/google/cloud/operators/cloud_sql.py +++ b/airflow/providers/google/cloud/operators/cloud_sql.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud SQL operators. -""" +"""This module contains Google Cloud SQL operators.""" from typing import Dict, Iterable, List, Optional, Sequence, Union from googleapiclient.errors import HttpError diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py index 536ebb1bff076..37b616574fe28 100644 --- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Google Cloud Transfer operators. -""" +"""This module contains Google Cloud Transfer operators.""" from copy import deepcopy from datetime import date, time from typing import Dict, Optional, Sequence, Union, List @@ -58,9 +56,7 @@ class TransferJobPreprocessor: - """ - Helper class for preprocess of transfer job body. - """ + """Helper class for preprocess of transfer job body.""" def __init__(self, body: dict, aws_conn_id: str = 'aws_default', default_schedule: bool = False) -> None: self.body = body @@ -116,23 +112,17 @@ def process_body(self) -> dict: @staticmethod def _convert_date_to_dict(field_date: date) -> dict: - """ - Convert native python ``datetime.date`` object to a format supported by the API - """ + """Convert native python ``datetime.date`` object to a format supported by the API""" return {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year} @staticmethod def _convert_time_to_dict(time_object: time) -> dict: - """ - Convert native python ``datetime.time`` object to a format supported by the API - """ + """Convert native python ``datetime.time`` object to a format supported by the API""" return {HOURS: time_object.hour, MINUTES: time_object.minute, SECONDS: time_object.second} class TransferJobValidator: - """ - Helper class for validating transfer job body. - """ + """Helper class for validating transfer job body.""" def __init__(self, body: dict) -> None: if not body: diff --git a/airflow/providers/google/cloud/operators/compute.py b/airflow/providers/google/cloud/operators/compute.py index d5bc4dc7a7622..3610837c35a39 100644 --- a/airflow/providers/google/cloud/operators/compute.py +++ b/airflow/providers/google/cloud/operators/compute.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Compute Engine operators. 
-""" +"""This module contains Google Compute Engine operators.""" from copy import deepcopy from typing import Any, Dict, List, Optional, Sequence, Union @@ -34,9 +32,7 @@ class ComputeEngineBaseOperator(BaseOperator): - """ - Abstract base operator for Google Compute Engine operators to inherit from. - """ + """Abstract base operator for Google Compute Engine operators to inherit from.""" @apply_defaults def __init__( diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index 588c13a719601..1b08008d21f74 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Dataflow operators. -""" +"""This module contains Google Dataflow operators.""" import copy import re diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py index c02fb946d86c7..d302a08d2001c 100644 --- a/airflow/providers/google/cloud/operators/datafusion.py +++ b/airflow/providers/google/cloud/operators/datafusion.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains Google DataFusion operators. -""" +"""This module contains Google DataFusion operators.""" from time import sleep from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/operators/dataprep.py b/airflow/providers/google/cloud/operators/dataprep.py index e53ba84f7988e..b22f0285c9e8e 100644 --- a/airflow/providers/google/cloud/operators/dataprep.py +++ b/airflow/providers/google/cloud/operators/dataprep.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Dataprep operator. -""" +"""This module contains a Google Dataprep operator.""" from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.dataprep import GoogleDataprepHook diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index c6fcdc01b0e53..50f425a4aef72 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Google Dataproc operators. -""" +"""This module contains Google Dataproc operators.""" # pylint: disable=C0302 import inspect @@ -750,9 +748,7 @@ def _graceful_decommission_timeout_object(self) -> Optional[Dict[str, int]]: return {'seconds': timeout} def execute(self, context) -> None: - """ - Scale, up or down, a cluster on Google Cloud Dataproc. 
- """ + """Scale, up or down, a cluster on Google Cloud Dataproc.""" self.log.info("Scaling cluster: %s", self.cluster_name) scaling_cluster_data = self._build_scale_cluster_data() @@ -960,9 +956,7 @@ def __init__( self.asynchronous = asynchronous def create_job_template(self): - """ - Initialize `self.job_template` with default values - """ + """Initialize `self.job_template` with default values""" self.job_template = DataProcJobBuilder( project_id=self.project_id, task_id=self.task_id, @@ -1464,9 +1458,7 @@ def _generate_temp_filename(filename): return "{}_{}_{}".format(date, str(uuid.uuid4())[:8], ntpath.basename(filename)) def _upload_file_temp(self, bucket, local_file): - """ - Upload a local file to a Google Cloud Storage bucket. - """ + """Upload a local file to a Google Cloud Storage bucket.""" temp_filename = self._generate_temp_filename(local_file) if not bucket: raise AirflowException( diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py index 30f4548edd643..9a5a572ac5c14 100644 --- a/airflow/providers/google/cloud/operators/datastore.py +++ b/airflow/providers/google/cloud/operators/datastore.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Google Datastore operators. -""" +"""This module contains Google Datastore operators.""" from typing import Any, Dict, List, Optional, Sequence, Union from airflow.exceptions import AirflowException diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py index 7066e1bedbc21..2b4d641a44421 100644 --- a/airflow/providers/google/cloud/operators/functions.py +++ b/airflow/providers/google/cloud/operators/functions.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Functions operators. -""" +"""This module contains Google Cloud Functions operators.""" import re from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py index a4c38b0930816..03593211d8f71 100644 --- a/airflow/providers/google/cloud/operators/gcs.py +++ b/airflow/providers/google/cloud/operators/gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Storage Bucket operator. -""" +"""This module contains a Google Cloud Storage Bucket operator.""" import subprocess import sys import warnings diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py index 5212fba557f59..407c76a19a600 100644 --- a/airflow/providers/google/cloud/operators/kubernetes_engine.py +++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module contains Google Kubernetes Engine operators. 
-""" +"""This module contains Google Kubernetes Engine operators.""" import os import tempfile diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py index 2fd34e7c2103c..8f5fa42374b03 100644 --- a/airflow/providers/google/cloud/operators/mlengine.py +++ b/airflow/providers/google/cloud/operators/mlengine.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud MLEngine operators. -""" +"""This module contains Google Cloud MLEngine operators.""" import logging import re import warnings @@ -1057,9 +1055,7 @@ def execute(self, context): class AIPlatformConsoleLink(BaseOperatorLink): - """ - Helper class for constructing AI Platform Console link. - """ + """Helper class for constructing AI Platform Console link.""" name = "AI Platform Console" diff --git a/airflow/providers/google/cloud/operators/natural_language.py b/airflow/providers/google/cloud/operators/natural_language.py index 927e3608a7d50..fa9e89e3d4362 100644 --- a/airflow/providers/google/cloud/operators/natural_language.py +++ b/airflow/providers/google/cloud/operators/natural_language.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Language operators. -""" +"""This module contains Google Cloud Language operators.""" from typing import Optional, Sequence, Tuple, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py index e539e6dc63b51..d07f529551bd6 100644 --- a/airflow/providers/google/cloud/operators/pubsub.py +++ b/airflow/providers/google/cloud/operators/pubsub.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google PubSub operators. -""" +"""This module contains Google PubSub operators.""" import warnings from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union diff --git a/airflow/providers/google/cloud/operators/spanner.py b/airflow/providers/google/cloud/operators/spanner.py index 12019bd80aa13..df69ac81d1e47 100644 --- a/airflow/providers/google/cloud/operators/spanner.py +++ b/airflow/providers/google/cloud/operators/spanner.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Spanner operators. -""" +"""This module contains Google Spanner operators.""" from typing import List, Optional, Sequence, Union from airflow.exceptions import AirflowException diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/airflow/providers/google/cloud/operators/speech_to_text.py index b8c7933157c1a..0bed07581f8bf 100644 --- a/airflow/providers/google/cloud/operators/speech_to_text.py +++ b/airflow/providers/google/cloud/operators/speech_to_text.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Speech to Text operator. 
-""" +"""This module contains a Google Speech to Text operator.""" from typing import Optional, Sequence, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/airflow/providers/google/cloud/operators/text_to_speech.py index 16dfe1b44b222..6965aca1885cb 100644 --- a/airflow/providers/google/cloud/operators/text_to_speech.py +++ b/airflow/providers/google/cloud/operators/text_to_speech.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Text to Speech operator. -""" +"""This module contains a Google Text to Speech operator.""" from tempfile import NamedTemporaryFile from typing import Dict, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/operators/translate.py b/airflow/providers/google/cloud/operators/translate.py index bfc33becb4a3b..d38bb7c537960 100644 --- a/airflow/providers/google/cloud/operators/translate.py +++ b/airflow/providers/google/cloud/operators/translate.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Translate operators. -""" +"""This module contains Google Translate operators.""" from typing import List, Optional, Sequence, Union from airflow.exceptions import AirflowException diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/airflow/providers/google/cloud/operators/translate_speech.py index ae75681a88eb5..645e5903cc01d 100644 --- a/airflow/providers/google/cloud/operators/translate_speech.py +++ b/airflow/providers/google/cloud/operators/translate_speech.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Translate Speech operator. -""" +"""This module contains a Google Cloud Translate Speech operator.""" from typing import Optional, Sequence, Union from google.cloud.speech_v1.types import RecognitionAudio, RecognitionConfig diff --git a/airflow/providers/google/cloud/operators/video_intelligence.py b/airflow/providers/google/cloud/operators/video_intelligence.py index dd759a1806e65..fdb73cd1b0164 100644 --- a/airflow/providers/google/cloud/operators/video_intelligence.py +++ b/airflow/providers/google/cloud/operators/video_intelligence.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Vision operators. -""" +"""This module contains Google Cloud Vision operators.""" from typing import Dict, Optional, Sequence, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/operators/vision.py b/airflow/providers/google/cloud/operators/vision.py index 14a80c1528168..73a47fb36706d 100644 --- a/airflow/providers/google/cloud/operators/vision.py +++ b/airflow/providers/google/cloud/operators/vision.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Vision operator. 
-""" +"""This module contains a Google Cloud Vision operator.""" from copy import deepcopy from typing import Any, Dict, List, Optional, Sequence, Tuple, Union diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/airflow/providers/google/cloud/secrets/secret_manager.py index 7932b89a9af56..0322d860c7ab4 100644 --- a/airflow/providers/google/cloud/secrets/secret_manager.py +++ b/airflow/providers/google/cloud/secrets/secret_manager.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -Objects relating to sourcing connections from Google Cloud Secrets Manager -""" +"""Objects relating to sourcing connections from Google Cloud Secrets Manager""" from typing import Optional from cached_property import cached_property diff --git a/airflow/providers/google/cloud/sensors/bigquery.py b/airflow/providers/google/cloud/sensors/bigquery.py index dfe6fc8d87a5a..28771fd87a31a 100644 --- a/airflow/providers/google/cloud/sensors/bigquery.py +++ b/airflow/providers/google/cloud/sensors/bigquery.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Bigquery sensor. -""" +"""This module contains a Google Bigquery sensor.""" from typing import Optional, Sequence, Union from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/airflow/providers/google/cloud/sensors/bigquery_dts.py index e68086dcb7156..4fc09fd90d054 100644 --- a/airflow/providers/google/cloud/sensors/bigquery_dts.py +++ b/airflow/providers/google/cloud/sensors/bigquery_dts.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google BigQuery Data Transfer Service sensor. -""" +"""This module contains a Google BigQuery Data Transfer Service sensor.""" from typing import Optional, Sequence, Set, Tuple, Union from google.api_core.retry import Retry diff --git a/airflow/providers/google/cloud/sensors/bigtable.py b/airflow/providers/google/cloud/sensors/bigtable.py index e61d9079bfa6a..3fbfd2d32cfe6 100644 --- a/airflow/providers/google/cloud/sensors/bigtable.py +++ b/airflow/providers/google/cloud/sensors/bigtable.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Bigtable sensor. -""" +"""This module contains Google Cloud Bigtable sensor.""" from typing import Optional, Sequence, Union import google.api_core.exceptions diff --git a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py index cec64d3d693e9..75c8ac0c608ce 100644 --- a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Transfer sensor. 
-""" +"""This module contains a Google Cloud Transfer sensor.""" from typing import Optional, Sequence, Set, Union from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import CloudDataTransferServiceHook diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/airflow/providers/google/cloud/sensors/dataproc.py index 8b1263faa2601..873bc60454686 100644 --- a/airflow/providers/google/cloud/sensors/dataproc.py +++ b/airflow/providers/google/cloud/sensors/dataproc.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Dataproc Job sensor. -""" +"""This module contains a Dataproc Job sensor.""" # pylint: disable=C0302 from google.cloud.dataproc_v1beta2.types import JobStatus diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/airflow/providers/google/cloud/sensors/gcs.py index 5a3d1993aa7c5..3b064159050f5 100644 --- a/airflow/providers/google/cloud/sensors/gcs.py +++ b/airflow/providers/google/cloud/sensors/gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Storage sensors. -""" +"""This module contains Google Cloud Storage sensors.""" import os from datetime import datetime diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py index f844649521f2d..c5bbf6dc05a48 100644 --- a/airflow/providers/google/cloud/sensors/pubsub.py +++ b/airflow/providers/google/cloud/sensors/pubsub.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google PubSub sensor. -""" +"""This module contains a Google PubSub sensor.""" import warnings from typing import Any, Callable, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py index 529d41e7572ea..c703abca47232 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google BigQuery to BigQuery operator. -""" +"""This module contains Google BigQuery to BigQuery operator.""" import warnings from typing import Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py index b02661f4a447f..89ddb8621652d 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google BigQuery to Google Cloud Storage operator. 
-""" +"""This module contains Google BigQuery to Google Cloud Storage operator.""" import warnings from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py index 9f93d30b14f1d..32f52310596b6 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google BigQuery to MySQL operator. -""" +"""This module contains Google BigQuery to MySQL operator.""" from typing import Optional, Sequence, Union from airflow.models import BaseOperator diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py index c0fbb07bb119a..1504eddd16352 100644 --- a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py @@ -247,18 +247,14 @@ def _upload_to_gcs(self, files_to_upload: Dict[str, Any]): @classmethod def generate_data_dict(cls, names: Iterable[str], values: Any) -> Dict[str, Any]: - """ - Generates data structure that will be stored as file in GCS. - """ + """Generates data structure that will be stored as file in GCS.""" return {n: cls.convert_value(v) for n, v in zip(names, values)} @classmethod def convert_value( # pylint: disable=too-many-return-statements cls, value: Optional[Any] ) -> Optional[Any]: - """ - Convert value to BQ type. - """ + """Convert value to BQ type.""" if not value: return value elif isinstance(value, (str, int, float, bool, dict)): @@ -286,9 +282,7 @@ def convert_value( # pylint: disable=too-many-return-statements @classmethod def convert_array_types(cls, value: Union[List[Any], SortedSet]) -> List[Any]: - """ - Maps convert_value over array. - """ + """Maps convert_value over array.""" return [cls.convert_value(nested_value) for nested_value in value] @classmethod @@ -325,9 +319,7 @@ def convert_map_type(cls, value: OrderedMapSerializedKey) -> List[Dict[str, Any] @classmethod def generate_schema_dict(cls, name: str, type_: Any) -> Dict[str, Any]: - """ - Generates BQ schema. - """ + """Generates BQ schema.""" field_schema: Dict[str, Any] = {} field_schema.update({'name': name}) field_schema.update({'type_': cls.get_bq_type(type_)}) @@ -339,9 +331,7 @@ def generate_schema_dict(cls, name: str, type_: Any) -> Dict[str, Any]: @classmethod def get_bq_fields(cls, type_: Any) -> List[Dict[str, Any]]: - """ - Converts non simple type value to BQ representation. - """ + """Converts non simple type value to BQ representation.""" if cls.is_simple_type(type_): return [] @@ -364,30 +354,22 @@ def get_bq_fields(cls, type_: Any) -> List[Dict[str, Any]]: @staticmethod def is_simple_type(type_: Any) -> bool: - """ - Check if type is a simple type. - """ + """Check if type is a simple type.""" return type_.cassname in CassandraToGCSOperator.CQL_TYPE_MAP @staticmethod def is_array_type(type_: Any) -> bool: - """ - Check if type is an array type. - """ + """Check if type is an array type.""" return type_.cassname in ['ListType', 'SetType'] @staticmethod def is_record_type(type_: Any) -> bool: - """ - Checks the record type. 
- """ + """Checks the record type.""" return type_.cassname in ['UserType', 'TupleType', 'MapType'] @classmethod def get_bq_type(cls, type_: Any) -> str: - """ - Converts type to equivalent BQ type. - """ + """Converts type to equivalent BQ type.""" if cls.is_simple_type(type_): return CassandraToGCSOperator.CQL_TYPE_MAP[type_.cassname] elif cls.is_record_type(type_): @@ -399,9 +381,7 @@ def get_bq_type(cls, type_: Any) -> str: @classmethod def get_bq_mode(cls, type_: Any) -> str: - """ - Converts type to equivalent BQ mode. - """ + """Converts type to equivalent BQ mode.""" if cls.is_array_type(type_) or type_.cassname == 'MapType': return 'REPEATED' elif cls.is_record_type(type_) or cls.is_simple_type(type_): diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py index f76ed52bcb44a..8cf783a3d3639 100644 --- a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Facebook Ad Reporting to GCS operators. -""" +"""This module contains Facebook Ad Reporting to GCS operators.""" import csv import tempfile from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py index 04db28cd7e0cc..240a4162cfd6a 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Storage to BigQuery operator. -""" +"""This module contains a Google Cloud Storage to BigQuery operator.""" import json from typing import Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py index 9b125988b7aae..3fd96c8efd16e 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Storage operator. -""" +"""This module contains a Google Cloud Storage operator.""" import warnings from typing import Optional, Sequence, Union diff --git a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py index ba8727f8b36a4..550fed3ab80f4 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Cloud Storage to SFTP operator. 
-""" +"""This module contains Google Cloud Storage to SFTP operator.""" import os from tempfile import NamedTemporaryFile from typing import Optional, Sequence, Union @@ -148,9 +146,7 @@ def _copy_single_object( source_object: str, destination_path: str, ) -> None: - """ - Helper function to copy single object. - """ + """Helper function to copy single object.""" self.log.info( "Executing copy of gs://%s/%s to %s", self.source_bucket, diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py b/airflow/providers/google/cloud/transfers/local_to_gcs.py index b9e9467e96f6b..63f20cd5655e5 100644 --- a/airflow/providers/google/cloud/transfers/local_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/local_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains operator for uploading local file(s) to GCS. -""" +"""This module contains operator for uploading local file(s) to GCS.""" import os import warnings from glob import glob @@ -111,9 +109,7 @@ def __init__( self.impersonation_chain = impersonation_chain def execute(self, context): - """ - Uploads a file or list of files to Google Cloud Storage - """ + """Uploads a file or list of files to Google Cloud Storage""" hook = GCSHook( google_cloud_storage_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to, diff --git a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py index d29dede605485..0157b9a395c00 100644 --- a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -MsSQL to GCS operator. -""" +"""MsSQL to GCS operator.""" import decimal from typing import Dict diff --git a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py index 90f288769322e..8aedfa4186105 100644 --- a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -MySQL to GCS operator. -""" +"""MySQL to GCS operator.""" import base64 import calendar @@ -70,9 +68,7 @@ def __init__(self, *, mysql_conn_id='mysql_default', ensure_utc=False, **kwargs) self.ensure_utc = ensure_utc def query(self): - """ - Queries mysql and returns a cursor to the results. - """ + """Queries mysql and returns a cursor to the results.""" mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) conn = mysql.get_conn() cursor = conn.cursor() diff --git a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py index 74bdaa65afd6a..d820df9b66e4f 100644 --- a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -PostgreSQL to GCS operator. 
-""" +"""PostgreSQL to GCS operator.""" import datetime import json @@ -65,9 +63,7 @@ def __init__(self, *, postgres_conn_id='postgres_default', **kwargs): self.postgres_conn_id = postgres_conn_id def query(self): - """ - Queries Postgres and returns a cursor to the results. - """ + """Queries Postgres and returns a cursor to the results.""" hook = PostgresHook(postgres_conn_id=self.postgres_conn_id) conn = hook.get_conn() cursor = conn.cursor() diff --git a/airflow/providers/google/cloud/transfers/presto_to_gcs.py b/airflow/providers/google/cloud/transfers/presto_to_gcs.py index ba7eafe184250..7543f83d4fc94 100644 --- a/airflow/providers/google/cloud/transfers/presto_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/presto_to_gcs.py @@ -93,9 +93,7 @@ def executemany(self, *args, **kwargs): return self.cursor.executemany(*args, **kwargs) def peekone(self) -> Any: - """ - Return the next row without consuming it. - """ + """Return the next row without consuming it.""" self.initialized = True element = self.cursor.fetchone() self.rows.insert(0, element) @@ -139,9 +137,7 @@ def __next__(self) -> Any: return result def __iter__(self) -> "_PrestoToGCSPrestoCursorAdapter": - """ - Return self to make cursors compatible to the iteration protocol - """ + """Return self to make cursors compatible to the iteration protocol""" return self @@ -185,9 +181,7 @@ def __init__(self, *, presto_conn_id: str = "presto_default", **kwargs): self.presto_conn_id = presto_conn_id def query(self): - """ - Queries presto and returns a cursor to the results. - """ + """Queries presto and returns a cursor to the results.""" presto = PrestoHook(presto_conn_id=self.presto_conn_id) conn = presto.get_conn() cursor = conn.cursor() diff --git a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py index e42bf74f55d43..2f54600ae67ca 100644 --- a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains SFTP to Google Cloud Storage operator. -""" +"""This module contains SFTP to Google Cloud Storage operator.""" import os from tempfile import NamedTemporaryFile from typing import Optional, Sequence, Union @@ -156,9 +154,7 @@ def _copy_single_object( source_path: str, destination_object: str, ) -> None: - """ - Helper function to copy single object. - """ + """Helper function to copy single object.""" self.log.info( "Executing copy of %s to gs://%s/%s", source_path, diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py index 0c836be9d1da9..29dde3fde4189 100644 --- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py @@ -15,10 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Base operator for SQL to GCS operators. -""" - +"""Base operator for SQL to GCS operators.""" import abc import json import warnings @@ -259,9 +256,7 @@ def convert_type(self, value, schema_type): """Convert a value from DBAPI to output-friendly formats.""" def _get_col_type_dict(self): - """ - Return a dict of column name and column type based on self.schema if not None. 
- """ + """Return a dict of column name and column type based on self.schema if not None.""" schema = [] if isinstance(self.schema, str): schema = json.loads(self.schema) diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py index f7e8880a323de..141f187447eee 100644 --- a/airflow/providers/google/cloud/utils/credentials_provider.py +++ b/airflow/providers/google/cloud/utils/credentials_provider.py @@ -305,9 +305,7 @@ def _log_debug(self, *args, **kwargs) -> None: def get_credentials_and_project_id(*args, **kwargs) -> Tuple[google.auth.credentials.Credentials, str]: - """ - Returns the Credentials object for Google API and the associated project_id. - """ + """Returns the Credentials object for Google API and the associated project_id.""" return _CredentialProvider(*args, **kwargs).get_credentials_and_project() diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/airflow/providers/google/cloud/utils/field_sanitizer.py index a80a3004d25b1..1961697bfca60 100644 --- a/airflow/providers/google/cloud/utils/field_sanitizer.py +++ b/airflow/providers/google/cloud/utils/field_sanitizer.py @@ -161,8 +161,6 @@ def _sanitize(self, dictionary, remaining_field_spec, current_path): ) def sanitize(self, body) -> None: - """ - Sanitizes the body according to specification. - """ + """Sanitizes the body according to specification.""" for elem in self._sanitize_specs: self._sanitize(body, elem, "") diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py index 0a7b809294538..2cd89bd8c06d0 100644 --- a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py +++ b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py @@ -16,9 +16,7 @@ # under the License. # -""" -This module contains helper functions for MLEngine operators. -""" +"""This module contains helper functions for MLEngine operators.""" import base64 import json diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py index 3d9133df55684..8f0f92b27ae03 100644 --- a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +++ b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py @@ -94,9 +94,7 @@ def metric_fn(inst): class JsonCoder: - """ - JSON encoder/decoder. - """ + """JSON encoder/decoder.""" @staticmethod def encode(x): @@ -111,9 +109,7 @@ def decode(x): @beam.ptransform_fn def MakeSummary(pcoll, metric_fn, metric_keys): # pylint: disable=invalid-name - """ - Summary PTransofrm used in Dataflow. - """ + """Summary PTransofrm used in Dataflow.""" return ( pcoll | "ApplyMetricFnPerInstance" >> beam.Map(metric_fn) @@ -129,9 +125,7 @@ def MakeSummary(pcoll, metric_fn, metric_keys): # pylint: disable=invalid-name def run(argv=None): - """ - Helper for obtaining prediction summary. - """ + """Helper for obtaining prediction summary.""" parser = argparse.ArgumentParser() parser.add_argument( "--prediction_path", diff --git a/airflow/providers/google/common/hooks/base_google.py b/airflow/providers/google/common/hooks/base_google.py index 248e8ff0e1508..9b47c4c43e8d9 100644 --- a/airflow/providers/google/common/hooks/base_google.py +++ b/airflow/providers/google/common/hooks/base_google.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-""" -This module contains a Google Cloud API base hook. -""" +"""This module contains a Google Cloud API base hook.""" import functools import json import logging @@ -172,9 +170,7 @@ def __init__( self._cached_project_id: Optional[str] = None def _get_credentials_and_project_id(self) -> Tuple[google.auth.credentials.Credentials, Optional[str]]: - """ - Returns the Credentials object for Google API and the associated project_id - """ + """Returns the Credentials object for Google API and the associated project_id""" if self._cached_credentials is not None: return self._cached_credentials, self._cached_project_id @@ -208,16 +204,12 @@ def _get_credentials_and_project_id(self) -> Tuple[google.auth.credentials.Crede return credentials, project_id def _get_credentials(self) -> google.auth.credentials.Credentials: - """ - Returns the Credentials object for Google API - """ + """Returns the Credentials object for Google API""" credentials, _ = self._get_credentials_and_project_id() return credentials def _get_access_token(self) -> str: - """ - Returns a valid access token from Google API Credentials - """ + """Returns a valid access token from Google API Credentials""" return self._get_credentials().token def _authorize(self) -> google_auth_httplib2.AuthorizedHttp: diff --git a/airflow/providers/google/common/hooks/discovery_api.py b/airflow/providers/google/common/hooks/discovery_api.py index fce19bb756b28..7b84e0fd5fcd6 100644 --- a/airflow/providers/google/common/hooks/discovery_api.py +++ b/airflow/providers/google/common/hooks/discovery_api.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -This module allows you to connect to the Google Discovery API Service and query it. -""" +"""This module allows you to connect to the Google Discovery API Service and query it.""" from typing import Optional, Sequence, Union from googleapiclient.discovery import Resource, build diff --git a/airflow/providers/google/common/utils/id_token_credentials.py b/airflow/providers/google/common/utils/id_token_credentials.py index d4f92d5af90bf..fcd1f8ad707b4 100644 --- a/airflow/providers/google/common/utils/id_token_credentials.py +++ b/airflow/providers/google/common/utils/id_token_credentials.py @@ -108,9 +108,7 @@ def _load_credentials_from_file( def _get_explicit_environ_credentials( target_audience: Optional[str], ) -> Optional[google_auth_credentials.Credentials]: - """ - Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment variable. - """ + """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment variable.""" explicit_file = os.environ.get(environment_vars.CREDENTIALS) if explicit_file is None: diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/airflow/providers/google/marketing_platform/hooks/analytics.py index 48af65f562ebc..70b6979bfe224 100644 --- a/airflow/providers/google/marketing_platform/hooks/analytics.py +++ b/airflow/providers/google/marketing_platform/hooks/analytics.py @@ -24,9 +24,7 @@ class GoogleAnalyticsHook(GoogleBaseHook): - """ - Hook for Google Analytics 360. - """ + """Hook for Google Analytics 360.""" def __init__(self, api_version: str = "v3", *args, **kwargs): super().__init__(*args, **kwargs) @@ -49,9 +47,7 @@ def _paginate(self, resource: Resource, list_args: Optional[Dict[str, Any]] = No return result def get_conn(self) -> Resource: - """ - Retrieves connection to Google Analytics 360. 
- """ + """Retrieves connection to Google Analytics 360.""" if not self._conn: http_authorized = self._authorize() self._conn = build( @@ -63,9 +59,7 @@ def get_conn(self) -> Resource: return self._conn def list_accounts(self) -> List[Dict[str, Any]]: - """ - Lists accounts list from Google Analytics 360. - """ + """Lists accounts list from Google Analytics 360.""" self.log.info("Retrieving accounts list...") conn = self.get_conn() accounts = conn.management().accounts() # pylint: disable=no-member diff --git a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py b/airflow/providers/google/marketing_platform/hooks/campaign_manager.py index adbef5d71f323..280cd1cc9a8f4 100644 --- a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/hooks/campaign_manager.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Campaign Manager hook. -""" +"""This module contains Google Campaign Manager hook.""" from typing import Any, Dict, List, Optional, Sequence, Union from googleapiclient import http @@ -28,9 +26,7 @@ class GoogleCampaignManagerHook(GoogleBaseHook): - """ - Hook for Google Campaign Manager. - """ + """Hook for Google Campaign Manager.""" _conn = None # type: Optional[Resource] @@ -49,9 +45,7 @@ def __init__( self.api_version = api_version def get_conn(self) -> Resource: - """ - Retrieves connection to Campaign Manager. - """ + """Retrieves connection to Campaign Manager.""" if not self._conn: http_authorized = self._authorize() self._conn = build( diff --git a/airflow/providers/google/marketing_platform/hooks/display_video.py b/airflow/providers/google/marketing_platform/hooks/display_video.py index a180a7cba3fc8..87896331b9cf5 100644 --- a/airflow/providers/google/marketing_platform/hooks/display_video.py +++ b/airflow/providers/google/marketing_platform/hooks/display_video.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google DisplayVideo hook. -""" +"""This module contains Google DisplayVideo hook.""" from typing import Any, Dict, List, Optional, Sequence, Union @@ -27,9 +25,7 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook): - """ - Hook for Google Display & Video 360. - """ + """Hook for Google Display & Video 360.""" _conn = None # type: Optional[Any] @@ -48,9 +44,7 @@ def __init__( self.api_version = api_version def get_conn(self) -> Resource: - """ - Retrieves connection to DisplayVideo. - """ + """Retrieves connection to DisplayVideo.""" if not self._conn: http_authorized = self._authorize() self._conn = build( @@ -62,9 +56,7 @@ def get_conn(self) -> Resource: return self._conn def get_conn_to_display_video(self) -> Resource: - """ - Retrieves connection to DisplayVideo. - """ + """Retrieves connection to DisplayVideo.""" if not self._conn: http_authorized = self._authorize() self._conn = build( @@ -143,10 +135,7 @@ def get_query(self, query_id: str) -> dict: def list_queries( self, ) -> List[Dict]: - """ - Retrieves stored queries. 
- - """ + """Retrieves stored queries.""" response = ( self.get_conn() # pylint: disable=no-member .queries() diff --git a/airflow/providers/google/marketing_platform/hooks/search_ads.py b/airflow/providers/google/marketing_platform/hooks/search_ads.py index f6342aaba5021..760ab927f90ff 100644 --- a/airflow/providers/google/marketing_platform/hooks/search_ads.py +++ b/airflow/providers/google/marketing_platform/hooks/search_ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Search Ads 360 hook. -""" +"""This module contains Google Search Ads 360 hook.""" from typing import Any, Dict, Optional, Sequence, Union from googleapiclient.discovery import build @@ -26,9 +24,7 @@ class GoogleSearchAdsHook(GoogleBaseHook): - """ - Hook for Google Search Ads 360. - """ + """Hook for Google Search Ads 360.""" _conn = None # type: Optional[Any] @@ -47,9 +43,7 @@ def __init__( self.api_version = api_version def get_conn(self): - """ - Retrieves connection to Google SearchAds. - """ + """Retrieves connection to Google SearchAds.""" if not self._conn: http_authorized = self._authorize() self._conn = build( diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py index 079f5343a651c..c911914dc0e2a 100644 --- a/airflow/providers/google/marketing_platform/operators/analytics.py +++ b/airflow/providers/google/marketing_platform/operators/analytics.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Analytics 360 operators. -""" +"""This module contains Google Analytics 360 operators.""" import csv from tempfile import NamedTemporaryFile from typing import Dict, Optional, Sequence, Union, Any, List diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/airflow/providers/google/marketing_platform/operators/campaign_manager.py index 6939779b98aaa..b257854485fa4 100644 --- a/airflow/providers/google/marketing_platform/operators/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/operators/campaign_manager.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google CampaignManager operators. -""" +"""This module contains Google CampaignManager operators.""" import json import tempfile import uuid diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/airflow/providers/google/marketing_platform/operators/display_video.py index bb99c203748bf..7f023e1ffbc1e 100644 --- a/airflow/providers/google/marketing_platform/operators/display_video.py +++ b/airflow/providers/google/marketing_platform/operators/display_video.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google DisplayVideo operators. 
-""" +"""This module contains Google DisplayVideo operators.""" import csv import json import shutil diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/airflow/providers/google/marketing_platform/operators/search_ads.py index a37331fa407e3..3cf16c0731b06 100644 --- a/airflow/providers/google/marketing_platform/operators/search_ads.py +++ b/airflow/providers/google/marketing_platform/operators/search_ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Search Ads operators. -""" +"""This module contains Google Search Ads operators.""" import json from tempfile import NamedTemporaryFile from typing import Any, Dict, Optional, Sequence, Union diff --git a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py b/airflow/providers/google/marketing_platform/sensors/campaign_manager.py index de11003356fcc..7c021ba6b8274 100644 --- a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/sensors/campaign_manager.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Campaign Manager sensor. -""" +"""This module contains Google Campaign Manager sensor.""" from typing import Dict, Optional, Sequence, Union from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook diff --git a/airflow/providers/google/marketing_platform/sensors/display_video.py b/airflow/providers/google/marketing_platform/sensors/display_video.py index 7cb7da701ea50..e859ba9084985 100644 --- a/airflow/providers/google/marketing_platform/sensors/display_video.py +++ b/airflow/providers/google/marketing_platform/sensors/display_video.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -Sensor for detecting the completion of DV360 reports. -""" +"""Sensor for detecting the completion of DV360 reports.""" from typing import Optional, Sequence, Union from airflow import AirflowException diff --git a/airflow/providers/google/marketing_platform/sensors/search_ads.py b/airflow/providers/google/marketing_platform/sensors/search_ads.py index 2df25f2f6bb05..32d4f5a587777 100644 --- a/airflow/providers/google/marketing_platform/sensors/search_ads.py +++ b/airflow/providers/google/marketing_platform/sensors/search_ads.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains Google Search Ads sensor. -""" +"""This module contains Google Search Ads sensor.""" from typing import Optional, Sequence, Union from airflow.providers.google.marketing_platform.hooks.search_ads import GoogleSearchAdsHook diff --git a/airflow/providers/google/suite/hooks/sheets.py b/airflow/providers/google/suite/hooks/sheets.py index 69527653a9f47..3e4b62f1e5612 100644 --- a/airflow/providers/google/suite/hooks/sheets.py +++ b/airflow/providers/google/suite/hooks/sheets.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
# -""" -This module contains a Google Sheets API hook -""" +"""This module contains a Google Sheets API hook""" from typing import Any, Dict, List, Optional, Sequence, Union diff --git a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py index bbac490f91139..7427c36e9e293 100644 --- a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py +++ b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains a Google Cloud Storage to Google Drive transfer operator. -""" +"""This module contains a Google Cloud Storage to Google Drive transfer operator.""" import tempfile from typing import Optional, Sequence, Union diff --git a/airflow/providers/grpc/hooks/grpc.py b/airflow/providers/grpc/hooks/grpc.py index ccf8c2e413a26..1e2446f90b8ed 100644 --- a/airflow/providers/grpc/hooks/grpc.py +++ b/airflow/providers/grpc/hooks/grpc.py @@ -105,9 +105,7 @@ def get_conn(self) -> grpc.Channel: def run( self, stub_class: Callable, call_func: str, streaming: bool = False, data: Optional[dict] = None ) -> Generator: - """ - Call gRPC function and yield response to caller - """ + """Call gRPC function and yield response to caller""" if data is None: data = {} with self.get_conn() as channel: diff --git a/airflow/providers/hashicorp/secrets/vault.py b/airflow/providers/hashicorp/secrets/vault.py index 03b036803b214..e81d8ba8ffe7e 100644 --- a/airflow/providers/hashicorp/secrets/vault.py +++ b/airflow/providers/hashicorp/secrets/vault.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Objects relating to sourcing connections & variables from Hashicorp Vault -""" +"""Objects relating to sourcing connections & variables from Hashicorp Vault""" from typing import Optional from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient # noqa diff --git a/airflow/providers/imap/sensors/imap_attachment.py b/airflow/providers/imap/sensors/imap_attachment.py index a0c2b101c0bfb..4fcbaa7a81f6d 100644 --- a/airflow/providers/imap/sensors/imap_attachment.py +++ b/airflow/providers/imap/sensors/imap_attachment.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module allows you to poke for attachments on a mail server. 
-""" +"""This module allows you to poke for attachments on a mail server.""" from airflow.providers.imap.hooks.imap import ImapHook from airflow.sensors.base_sensor_operator import BaseSensorOperator from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/jenkins/hooks/jenkins.py b/airflow/providers/jenkins/hooks/jenkins.py index a3910d159db16..d4bf0ad36c992 100644 --- a/airflow/providers/jenkins/hooks/jenkins.py +++ b/airflow/providers/jenkins/hooks/jenkins.py @@ -25,9 +25,7 @@ class JenkinsHook(BaseHook): - """ - Hook to manage connection to jenkins server - """ + """Hook to manage connection to jenkins server""" def __init__(self, conn_id: str = 'jenkins_default') -> None: super().__init__() @@ -46,7 +44,5 @@ def __init__(self, conn_id: str = 'jenkins_default') -> None: self.jenkins_server = jenkins.Jenkins(url, connection.login, connection.password) def get_jenkins_server(self) -> jenkins.Jenkins: - """ - Get jenkins server - """ + """Get jenkins server""" return self.jenkins_server diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py index ec6cf8d7b5ce1..96d5acd05de9a 100644 --- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py +++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py @@ -181,9 +181,7 @@ def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int: ) def get_hook(self) -> JenkinsHook: - """ - Instantiate jenkins hook - """ + """Instantiate jenkins hook""" return JenkinsHook(self.jenkins_connection_id) def execute(self, context: Mapping[Any, Any]) -> Optional[str]: diff --git a/airflow/providers/microsoft/azure/hooks/azure_batch.py b/airflow/providers/microsoft/azure/hooks/azure_batch.py index b4b9a570765c4..5b6ccd801a3ae 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_batch.py +++ b/airflow/providers/microsoft/azure/hooks/azure_batch.py @@ -44,9 +44,7 @@ def __init__(self, azure_batch_conn_id: str = 'azure_batch_default') -> None: self.extra = self._connection().extra_dejson def _connection(self) -> Connection: - """ - Get connected to azure batch service - """ + """Get connected to azure batch service""" conn = self.get_connection(self.conn_id) return conn diff --git a/airflow/providers/microsoft/azure/hooks/azure_container_volume.py b/airflow/providers/microsoft/azure/hooks/azure_container_volume.py index 9e81616b9a4f6..de1e7a6c12550 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_container_volume.py +++ b/airflow/providers/microsoft/azure/hooks/azure_container_volume.py @@ -35,9 +35,7 @@ def __init__(self, wasb_conn_id: str = 'wasb_default') -> None: self.conn_id = wasb_conn_id def get_storagekey(self) -> str: - """ - Get Azure File Volume storage key - """ + """Get Azure File Volume storage key""" conn = self.get_connection(self.conn_id) service_options = conn.extra_dejson @@ -51,9 +49,7 @@ def get_storagekey(self) -> str: def get_file_volume( self, mount_name: str, share_name: str, storage_account_name: str, read_only: bool = False ) -> Volume: - """ - Get Azure File Volume - """ + """Get Azure File Volume""" return Volume( name=mount_name, azure_file=AzureFileVolume( diff --git a/airflow/providers/microsoft/azure/hooks/azure_cosmos.py b/airflow/providers/microsoft/azure/hooks/azure_cosmos.py index 8baf59c615d02..c73e0e4ef6090 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_cosmos.py +++ b/airflow/providers/microsoft/azure/hooks/azure_cosmos.py @@ -54,9 +54,7 @@ def __init__(self, 
azure_cosmos_conn_id: str = 'azure_cosmos_default') -> None: self.default_collection_name = None def get_conn(self) -> CosmosClient: - """ - Return a cosmos db client. - """ + """Return a cosmos db client.""" if not self._conn: conn = self.get_connection(self.conn_id) extras = conn.extra_dejson @@ -93,9 +91,7 @@ def __get_collection_name(self, collection_name: Optional[str] = None) -> str: return coll_name def does_collection_exist(self, collection_name: str, database_name: str) -> bool: - """ - Checks if a collection exists in CosmosDB. - """ + """Checks if a collection exists in CosmosDB.""" if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") @@ -114,9 +110,7 @@ def does_collection_exist(self, collection_name: str, database_name: str) -> boo return True def create_collection(self, collection_name: str, database_name: Optional[str] = None) -> None: - """ - Creates a new collection in the CosmosDB database. - """ + """Creates a new collection in the CosmosDB database.""" if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") @@ -139,9 +133,7 @@ def create_collection(self, collection_name: str, database_name: Optional[str] = ) def does_database_exist(self, database_name: str) -> bool: - """ - Checks if a database exists in CosmosDB. - """ + """Checks if a database exists in CosmosDB.""" if database_name is None: raise AirflowBadRequest("Database name cannot be None.") @@ -159,9 +151,7 @@ def does_database_exist(self, database_name: str) -> bool: return True def create_database(self, database_name: str) -> None: - """ - Creates a new database in CosmosDB. - """ + """Creates a new database in CosmosDB.""" if database_name is None: raise AirflowBadRequest("Database name cannot be None.") @@ -181,18 +171,14 @@ def create_database(self, database_name: str) -> None: self.get_conn().CreateDatabase({"id": database_name}) def delete_database(self, database_name: str) -> None: - """ - Deletes an existing database in CosmosDB. - """ + """Deletes an existing database in CosmosDB.""" if database_name is None: raise AirflowBadRequest("Database name cannot be None.") self.get_conn().DeleteDatabase(get_database_link(database_name)) def delete_collection(self, collection_name: str, database_name: Optional[str] = None) -> None: - """ - Deletes an existing collection in the CosmosDB database. - """ + """Deletes an existing collection in the CosmosDB database.""" if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") @@ -231,9 +217,7 @@ def upsert_document(self, document, database_name=None, collection_name=None, do def insert_documents( self, documents, database_name: Optional[str] = None, collection_name: Optional[str] = None ) -> list: - """ - Insert a list of new documents into an existing collection in the CosmosDB database. - """ + """Insert a list of new documents into an existing collection in the CosmosDB database.""" if documents is None: raise AirflowBadRequest("You cannot insert empty documents") @@ -253,9 +237,7 @@ def insert_documents( def delete_document( self, document_id: str, database_name: Optional[str] = None, collection_name: Optional[str] = None ) -> None: - """ - Delete an existing document out of a collection in the CosmosDB database. 
- """ + """Delete an existing document out of a collection in the CosmosDB database.""" if document_id is None: raise AirflowBadRequest("Cannot delete a document without an id") @@ -270,9 +252,7 @@ def delete_document( def get_document( self, document_id: str, database_name: Optional[str] = None, collection_name: Optional[str] = None ): - """ - Get a document from an existing collection in the CosmosDB database. - """ + """Get a document from an existing collection in the CosmosDB database.""" if document_id is None: raise AirflowBadRequest("Cannot get a document without an id") @@ -294,9 +274,7 @@ def get_documents( collection_name: Optional[str] = None, partition_key: Optional[str] = None, ) -> Optional[list]: - """ - Get a list of documents from an existing collection in the CosmosDB database via SQL query. - """ + """Get a list of documents from an existing collection in the CosmosDB database via SQL query.""" if sql_string is None: raise AirflowBadRequest("SQL query string cannot be None") @@ -318,21 +296,15 @@ def get_documents( def get_database_link(database_id: str) -> str: - """ - Get Azure CosmosDB database link - """ + """Get Azure CosmosDB database link""" return "dbs/" + database_id def get_collection_link(database_id: str, collection_id: str) -> str: - """ - Get Azure CosmosDB collection link - """ + """Get Azure CosmosDB collection link""" return get_database_link(database_id) + "/colls/" + collection_id def get_document_link(database_id: str, collection_id: str, document_id: str) -> str: - """ - Get Azure CosmosDB document link - """ + """Get Azure CosmosDB document link""" return get_collection_link(database_id, collection_id) + "/docs/" + document_id diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 5e3dc40bdc029..4fef64d0a599f 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -53,9 +53,7 @@ def __init__( @cached_property def hook(self): - """ - Returns WasbHook. - """ + """Returns WasbHook.""" remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID') try: from airflow.providers.microsoft.azure.hooks.wasb import WasbHook @@ -77,9 +75,7 @@ def set_context(self, ti) -> None: self.upload_on_close = not ti.raw def close(self) -> None: - """ - Close and upload local log file to remote storage Wasb. - """ + """Close and upload local log file to remote storage Wasb.""" # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple diff --git a/airflow/providers/microsoft/azure/operators/azure_batch.py b/airflow/providers/microsoft/azure/operators/azure_batch.py index 12e8cf99f82e7..2e012a4ed12c1 100644 --- a/airflow/providers/microsoft/azure/operators/azure_batch.py +++ b/airflow/providers/microsoft/azure/operators/azure_batch.py @@ -378,10 +378,7 @@ def on_kill(self) -> None: self.log.info("Azure Batch job (%s) terminated: %s", self.batch_job_id, response) def get_hook(self) -> AzureBatchHook: - """ - Create and return an AzureBatchHook. 
- - """ + """Create and return an AzureBatchHook.""" return AzureBatchHook(azure_batch_conn_id=self.azure_batch_conn_id) def clean_up(self, pool_id: Optional[str] = None, job_id: Optional[str] = None) -> None: diff --git a/airflow/providers/microsoft/azure/secrets/azure_key_vault.py b/airflow/providers/microsoft/azure/secrets/azure_key_vault.py index 9d98959a58b20..fc40c88db8f47 100644 --- a/airflow/providers/microsoft/azure/secrets/azure_key_vault.py +++ b/airflow/providers/microsoft/azure/secrets/azure_key_vault.py @@ -73,9 +73,7 @@ def __init__( @cached_property def client(self) -> SecretClient: - """ - Create a Azure Key Vault client. - """ + """Create a Azure Key Vault client.""" credential = DefaultAzureCredential() client = SecretClient(vault_url=self.vault_url, credential=credential, **self.kwargs) return client diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/airflow/providers/microsoft/mssql/hooks/mssql.py index 55c1f2b188c8d..3e5ec6adbae9f 100644 --- a/airflow/providers/microsoft/mssql/hooks/mssql.py +++ b/airflow/providers/microsoft/mssql/hooks/mssql.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Microsoft SQLServer hook module -""" +"""Microsoft SQLServer hook module""" import warnings @@ -27,9 +25,7 @@ class MsSqlHook(DbApiHook): - """ - Interact with Microsoft SQL Server. - """ + """Interact with Microsoft SQL Server.""" conn_name_attr = 'mssql_conn_id' default_conn_name = 'mssql_default' @@ -51,9 +47,7 @@ def __init__(self, *args, **kwargs) -> None: def get_conn( self, ) -> pymssql.connect: # pylint: disable=protected-access # pylint: disable=c-extension-no-member - """ - Returns a mssql connection object - """ + """Returns a mssql connection object""" conn = self.get_connection( self.mssql_conn_id # type: ignore[attr-defined] # pylint: disable=no-member ) diff --git a/airflow/providers/mongo/hooks/mongo.py b/airflow/providers/mongo/hooks/mongo.py index 71bd3b53d2923..5d71438e8a98d 100644 --- a/airflow/providers/mongo/hooks/mongo.py +++ b/airflow/providers/mongo/hooks/mongo.py @@ -76,9 +76,7 @@ def __exit__( self.close_conn() def get_conn(self) -> MongoClient: - """ - Fetches PyMongo Client - """ + """Fetches PyMongo Client""" if self.client is not None: return self.client diff --git a/airflow/providers/mysql/hooks/mysql.py b/airflow/providers/mysql/hooks/mysql.py index ff882c06bd1aa..ee175b9980782 100644 --- a/airflow/providers/mysql/hooks/mysql.py +++ b/airflow/providers/mysql/hooks/mysql.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module allows to connect to a MySQL database. -""" +"""This module allows to connect to a MySQL database.""" import json from typing import Dict, Optional, Tuple @@ -51,9 +49,7 @@ def __init__(self, *args, **kwargs) -> None: self.connection = kwargs.pop("connection", None) def set_autocommit(self, conn: Connection, autocommit: bool) -> None: # noqa: D403 - """ - MySql connection sets autocommit in a different way. 
- """ + """MySql connection sets autocommit in a different way.""" conn.autocommit(autocommit) def get_autocommit(self, conn: Connection) -> bool: # noqa: D403 @@ -162,9 +158,7 @@ def get_uri(self) -> str: return uri def bulk_load(self, table: str, tmp_file: str) -> None: - """ - Loads a tab-delimited file into a database table - """ + """Loads a tab-delimited file into a database table""" conn = self.get_conn() cur = conn.cursor() cur.execute( @@ -178,9 +172,7 @@ def bulk_load(self, table: str, tmp_file: str) -> None: conn.commit() def bulk_dump(self, table: str, tmp_file: str) -> None: - """ - Dumps a database table into a tab-delimited file - """ + """Dumps a database table into a tab-delimited file""" conn = self.get_conn() cur = conn.cursor() cur.execute( diff --git a/airflow/providers/odbc/hooks/odbc.py b/airflow/providers/odbc/hooks/odbc.py index 41b6383e2f1af..3eabdf2e63f66 100644 --- a/airflow/providers/odbc/hooks/odbc.py +++ b/airflow/providers/odbc/hooks/odbc.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains ODBC hook. -""" +"""This module contains ODBC hook.""" from typing import Optional, Any from urllib.parse import quote_plus @@ -70,25 +68,19 @@ def __init__( @property def connection(self): - """ - ``airflow.Connection`` object with connection id ``odbc_conn_id`` - """ + """``airflow.Connection`` object with connection id ``odbc_conn_id``""" if not self._connection: self._connection = self.get_connection(getattr(self, self.conn_name_attr)) return self._connection @property def database(self) -> Optional[str]: - """ - Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object. - """ + """Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object.""" return self._database or self.connection.schema @property def sqlalchemy_scheme(self) -> Optional[str]: - """ - Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object. - """ + """Database provided in init if exists; otherwise, ``schema`` from ``Connection`` object.""" return ( self._sqlalchemy_scheme or self.connection_extra_lower.get('sqlalchemy_scheme') @@ -106,9 +98,7 @@ def connection_extra_lower(self) -> dict: @property def driver(self) -> Optional[str]: - """ - Driver from init param if given; else try to find one in connection extra. - """ + """Driver from init param if given; else try to find one in connection extra.""" if not self._driver: driver = self.connection_extra_lower.get('driver') if driver: @@ -117,9 +107,7 @@ def driver(self) -> Optional[str]: @property def dsn(self) -> Optional[str]: - """ - DSN from init param if given; else try to find one in connection extra. - """ + """DSN from init param if given; else try to find one in connection extra.""" if not self._dsn: dsn = self.connection_extra_lower.get('dsn') if dsn: @@ -196,16 +184,12 @@ def clean_bool(val): # pylint: disable=inconsistent-return-statements return {k: clean_bool(v) for k, v in merged_connect_kwargs.items()} def get_conn(self) -> pyodbc.Connection: - """ - Returns a pyodbc connection object. 
- """ + """Returns a pyodbc connection object.""" conn = pyodbc.connect(self.odbc_connection_string, **self.connect_kwargs) return conn def get_uri(self) -> str: - """ - URI invoked in :py:meth:`~airflow.hooks.dbapi_hook.DbApiHook.get_sqlalchemy_engine` method - """ + """URI invoked in :py:meth:`~airflow.hooks.dbapi_hook.DbApiHook.get_sqlalchemy_engine` method""" quoted_conn_str = quote_plus(self.odbc_connection_string) uri = f"{self.sqlalchemy_scheme}:///?odbc_connect={quoted_conn_str}" return uri @@ -213,9 +197,7 @@ def get_uri(self) -> str: def get_sqlalchemy_connection( self, connect_kwargs: Optional[dict] = None, engine_kwargs: Optional[dict] = None ) -> Any: - """ - Sqlalchemy connection object - """ + """Sqlalchemy connection object""" engine = self.get_sqlalchemy_engine(engine_kwargs=engine_kwargs) cnx = engine.connect(**(connect_kwargs or {})) return cnx diff --git a/airflow/providers/openfaas/hooks/openfaas.py b/airflow/providers/openfaas/hooks/openfaas.py index f54462be62115..5ee17935f0b40 100644 --- a/airflow/providers/openfaas/hooks/openfaas.py +++ b/airflow/providers/openfaas/hooks/openfaas.py @@ -52,9 +52,7 @@ def get_conn(self): return conn def deploy_function(self, overwrite_function_if_exist: bool, body: Dict[str, Any]) -> None: - """ - Deploy OpenFaaS function - """ + """Deploy OpenFaaS function""" if overwrite_function_if_exist: self.log.info("Function already exist %s going to update", self.function_name) self.update_function(body) @@ -70,9 +68,7 @@ def deploy_function(self, overwrite_function_if_exist: bool, body: Dict[str, Any self.log.info("Function deployed %s", self.function_name) def invoke_async_function(self, body: Dict[str, Any]) -> None: - """ - Invoking function - """ + """Invoking function""" url = self.get_conn().host + self.INVOKE_ASYNC_FUNCTION + self.function_name self.log.info("Invoking function %s", url) response = requests.post(url, body) @@ -83,9 +79,7 @@ def invoke_async_function(self, body: Dict[str, Any]) -> None: raise AirflowException('failed to invoke function') def update_function(self, body: Dict[str, Any]) -> None: - """ - Update OpenFaaS function - """ + """Update OpenFaaS function""" url = self.get_conn().host + self.UPDATE_FUNCTION self.log.info("Updating function %s", url) response = requests.put(url, body) @@ -97,9 +91,7 @@ def update_function(self, body: Dict[str, Any]) -> None: self.log.info("Function was updated") def does_function_exist(self) -> bool: - """ - Whether OpenFaaS function exists or not - """ + """Whether OpenFaaS function exists or not""" url = self.get_conn().host + self.GET_FUNCTION + self.function_name response = requests.get(url) diff --git a/airflow/providers/opsgenie/hooks/opsgenie_alert.py b/airflow/providers/opsgenie/hooks/opsgenie_alert.py index a8e07b1c4af8c..9e5aa6b6b8e08 100644 --- a/airflow/providers/opsgenie/hooks/opsgenie_alert.py +++ b/airflow/providers/opsgenie/hooks/opsgenie_alert.py @@ -45,9 +45,7 @@ def __init__(self, opsgenie_conn_id: str = 'opsgenie_default', *args, **kwargs) super().__init__(http_conn_id=opsgenie_conn_id, *args, **kwargs) # type: ignore[misc] def _get_api_key(self) -> str: - """ - Get Opsgenie api_key for creating alert - """ + """Get Opsgenie api_key for creating alert""" conn = self.get_connection(self.http_conn_id) api_key = conn.password if not api_key: diff --git a/airflow/providers/opsgenie/operators/opsgenie_alert.py b/airflow/providers/opsgenie/operators/opsgenie_alert.py index 3c4e77486e656..faea2d720e3b4 100644 --- 
a/airflow/providers/opsgenie/operators/opsgenie_alert.py +++ b/airflow/providers/opsgenie/operators/opsgenie_alert.py @@ -138,8 +138,6 @@ def _build_opsgenie_payload(self) -> Dict[str, Any]: return payload def execute(self, context) -> None: - """ - Call the OpsgenieAlertHook to post message - """ + """Call the OpsgenieAlertHook to post message""" self.hook = OpsgenieAlertHook(self.opsgenie_conn_id) self.hook.execute(self._build_opsgenie_payload()) diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py index 99a372bd90172..2a1ccffc9850d 100644 --- a/airflow/providers/oracle/hooks/oracle.py +++ b/airflow/providers/oracle/hooks/oracle.py @@ -26,9 +26,7 @@ class OracleHook(DbApiHook): - """ - Interact with Oracle SQL. - """ + """Interact with Oracle SQL.""" conn_name_attr = 'oracle_conn_id' default_conn_name = 'oracle_default' diff --git a/airflow/providers/papermill/operators/papermill.py b/airflow/providers/papermill/operators/papermill.py index 4abbc8036ec96..cbf528d4c7875 100644 --- a/airflow/providers/papermill/operators/papermill.py +++ b/airflow/providers/papermill/operators/papermill.py @@ -27,9 +27,7 @@ @attr.s(auto_attribs=True) class NoteBook(File): - """ - Jupyter notebook - """ + """Jupyter notebook""" type_hint: Optional[str] = "jupyter_notebook" parameters: Optional[Dict] = {} diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py index 979b784570a82..2d67d79210a64 100644 --- a/airflow/providers/postgres/hooks/postgres.py +++ b/airflow/providers/postgres/hooks/postgres.py @@ -76,9 +76,7 @@ def _get_cursor(self, raw_cursor: str) -> CursorType: raise ValueError('Invalid cursor passed {}'.format(_cursor)) def get_conn(self) -> connection: - """ - Establishes a connection to a postgres database. - """ + """Establishes a connection to a postgres database.""" conn_id = getattr(self, self.conn_name_attr) conn = self.connection or self.get_connection(conn_id) @@ -131,15 +129,11 @@ def copy_expert(self, sql: str, filename: str) -> None: conn.commit() def bulk_load(self, table: str, tmp_file: str) -> None: - """ - Loads a tab-delimited file into a database table - """ + """Loads a tab-delimited file into a database table""" self.copy_expert("COPY {table} FROM STDIN".format(table=table), tmp_file) def bulk_dump(self, table: str, tmp_file: str) -> None: - """ - Dumps a database table into a tab-delimited file - """ + """Dumps a database table into a tab-delimited file""" self.copy_expert("COPY {table} TO STDOUT".format(table=table), tmp_file) # pylint: disable=signature-differs diff --git a/airflow/providers/presto/hooks/presto.py b/airflow/providers/presto/hooks/presto.py index 4d4fddd0b4ae6..26575d3252a43 100644 --- a/airflow/providers/presto/hooks/presto.py +++ b/airflow/providers/presto/hooks/presto.py @@ -29,9 +29,7 @@ class PrestoException(Exception): - """ - Presto exception - """ + """Presto exception""" def _boolify(value): @@ -116,28 +114,21 @@ def _strip_sql(sql: str) -> str: return sql.strip().rstrip(';') def get_records(self, hql, parameters: Optional[dict] = None): - """ - Get a set of records from Presto - """ + """Get a set of records from Presto""" try: return super().get_records(self._strip_sql(hql), parameters) except DatabaseError as e: raise PrestoException(e) def get_first(self, hql: str, parameters: Optional[dict] = None) -> Any: - """ - Returns only the first row, regardless of how many rows the query - returns. 
- """ + """Returns only the first row, regardless of how many rows the query returns.""" try: return super().get_first(self._strip_sql(hql), parameters) except DatabaseError as e: raise PrestoException(e) def get_pandas_df(self, hql, parameters=None, **kwargs): - """ - Get a pandas dataframe from a sql query. - """ + """Get a pandas dataframe from a sql query.""" import pandas cursor = self.get_cursor() @@ -160,9 +151,7 @@ def run( autocommit: bool = False, parameters: Optional[dict] = None, ) -> None: - """ - Execute the statement against Presto. Can be used to create views. - """ + """Execute the statement against Presto. Can be used to create views.""" return super().run(sql=self._strip_sql(hql), parameters=parameters) def insert_rows( diff --git a/airflow/providers/qubole/hooks/qubole_check.py b/airflow/providers/qubole/hooks/qubole_check.py index d648939d197eb..1c6bdf43e2eac 100644 --- a/airflow/providers/qubole/hooks/qubole_check.py +++ b/airflow/providers/qubole/hooks/qubole_check.py @@ -32,9 +32,7 @@ def isint(value) -> bool: - """ - Whether Qubole column are integer - """ + """Whether Qubole column are integer""" try: int(value) return True @@ -43,9 +41,7 @@ def isint(value) -> bool: def isfloat(value) -> bool: - """ - Whether Qubole column are float - """ + """Whether Qubole column are float""" try: float(value) return True @@ -54,9 +50,7 @@ def isfloat(value) -> bool: def isbool(value) -> bool: - """ - Whether Qubole column are boolean - """ + """Whether Qubole column are boolean""" try: return value.lower() in ["true", "false"] except ValueError: @@ -64,9 +58,7 @@ def isbool(value) -> bool: def parse_first_row(row_list) -> List[Union[bool, float, int, str]]: - """ - Parse Qubole first record list - """ + """Parse Qubole first record list""" record_list = [] first_row = row_list[0] if row_list else "" @@ -83,9 +75,7 @@ def parse_first_row(row_list) -> List[Union[bool, float, int, str]]: class QuboleCheckHook(QuboleHook): - """ - Qubole check hook - """ + """Qubole check hook""" def __init__(self, context, *args, **kwargs) -> None: super().__init__(*args, **kwargs) @@ -109,9 +99,7 @@ def handle_failure_retry(context) -> None: cmd.cancel() def get_first(self, sql): # pylint: disable=unused-argument - """ - Get Qubole query first record list - """ + """Get Qubole query first record list""" self.execute(context=self.context) query_result = self.get_query_results() row_list = list(filter(None, query_result.split(ROW_DELIM))) @@ -119,9 +107,7 @@ def get_first(self, sql): # pylint: disable=unused-argument return record_list def get_query_results(self) -> Optional[str]: - """ - Get Qubole query result - """ + """Get Qubole query result""" if self.cmd is not None: cmd_id = self.cmd.id self.log.info("command id: %d", cmd_id) diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py index 68bdb0f39d8b0..fc1561c28eeb0 100644 --- a/airflow/providers/qubole/operators/qubole_check.py +++ b/airflow/providers/qubole/operators/qubole_check.py @@ -215,9 +215,7 @@ def __setattr__(self, name: str, value: str) -> None: def get_sql_from_qbol_cmd(params) -> str: - """ - Get Qubole sql from Qubole command - """ + """Get Qubole sql from Qubole command""" sql = '' if 'query' in params: sql = params['query'] @@ -227,9 +225,7 @@ def get_sql_from_qbol_cmd(params) -> str: def handle_airflow_exception(airflow_exception, hook): - """ - Qubole check handle Airflow exception - """ + """Qubole check handle Airflow exception""" cmd = hook.cmd if cmd is not 
None: if cmd.is_success(cmd.status): diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py index bf6098225b675..de65a8a55d976 100644 --- a/airflow/providers/qubole/sensors/qubole.py +++ b/airflow/providers/qubole/sensors/qubole.py @@ -26,9 +26,7 @@ class QuboleSensor(BaseSensorOperator): - """ - Base class for all Qubole Sensors - """ + """Base class for all Qubole Sensors""" template_fields = ('data', 'qubole_conn_id') diff --git a/airflow/providers/redis/hooks/redis.py b/airflow/providers/redis/hooks/redis.py index 19754eae6c5f6..340da6099a1b6 100644 --- a/airflow/providers/redis/hooks/redis.py +++ b/airflow/providers/redis/hooks/redis.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -RedisHook module -""" +"""RedisHook module""" from redis import Redis from airflow.hooks.base_hook import BaseHook @@ -49,9 +47,7 @@ def __init__(self, redis_conn_id: str = 'redis_default') -> None: self.db = None def get_conn(self): - """ - Returns a Redis connection. - """ + """Returns a Redis connection.""" conn = self.get_connection(self.redis_conn_id) self.host = conn.host self.port = conn.port diff --git a/airflow/providers/redis/sensors/redis_key.py b/airflow/providers/redis/sensors/redis_key.py index 4fbbdb31884ce..a6fc2919689a7 100644 --- a/airflow/providers/redis/sensors/redis_key.py +++ b/airflow/providers/redis/sensors/redis_key.py @@ -23,9 +23,7 @@ class RedisKeySensor(BaseSensorOperator): - """ - Checks for the existence of a key in a Redis - """ + """Checks for the existence of a key in a Redis""" template_fields = ('key',) ui_color = '#f0eee4' diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py index 9824aeae958f5..3a32f8b5640da 100644 --- a/airflow/providers/salesforce/hooks/salesforce.py +++ b/airflow/providers/salesforce/hooks/salesforce.py @@ -61,9 +61,7 @@ def __init__(self, conn_id: str) -> None: self.conn = None def get_conn(self) -> api.Salesforce: - """ - Sign into Salesforce, only if we are not already signed in. - """ + """Sign into Salesforce, only if we are not already signed in.""" if not self.conn: connection = self.get_connection(self.conn_id) extras = connection.extra_dejson diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/salesforce/sensors/tableau_job_status.py index ee36f9db002e6..b2d7992812b8e 100644 --- a/airflow/providers/salesforce/sensors/tableau_job_status.py +++ b/airflow/providers/salesforce/sensors/tableau_job_status.py @@ -23,9 +23,7 @@ class TableauJobFailedException(AirflowException): - """ - An exception that indicates that a Job failed to complete. - """ + """An exception that indicates that a Job failed to complete.""" class TableauJobStatusSensor(BaseSensorOperator): diff --git a/airflow/providers/samba/hooks/samba.py b/airflow/providers/samba/hooks/samba.py index 8a20fbefdee6b..97fd42f1851d7 100644 --- a/airflow/providers/samba/hooks/samba.py +++ b/airflow/providers/samba/hooks/samba.py @@ -24,9 +24,7 @@ class SambaHook(BaseHook): - """ - Allows for interaction with an samba server. 
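
Looking back at the Qubole check helpers above: `isint`/`isfloat`/`isbool` implement simple type sniffing so that `parse_first_row` can coerce the strings Qubole returns into typed Python values. A standalone sketch of the same idea; the precedence below (bool, then int, then float) is one plausible ordering, since `parse_first_row`'s body is not shown in this diff:

    from typing import Union


    def coerce(value: str) -> Union[bool, int, float, str]:
        # Booleans first, so "true"/"false" are not left as plain strings.
        if value.lower() in ("true", "false"):
            return value.lower() == "true"
        try:
            return int(value)
        except ValueError:
            pass
        try:
            return float(value)
        except ValueError:
            return value  # fall back to the raw string


    assert coerce("42") == 42 and coerce("4.2") == 4.2 and coerce("true") is True
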
- """ + """Allows for interaction with an samba server.""" def __init__(self, samba_conn_id: str) -> None: super().__init__() @@ -43,9 +41,7 @@ def get_conn(self) -> SambaClient: return samba def push_from_local(self, destination_filepath: str, local_filepath: str) -> None: - """ - Push local file to samba server - """ + """Push local file to samba server""" samba = self.get_conn() if samba.exists(destination_filepath): if samba.isfile(destination_filepath): diff --git a/airflow/providers/segment/hooks/segment.py b/airflow/providers/segment/hooks/segment.py index aba01c366a3c7..1944d6d91d8c8 100644 --- a/airflow/providers/segment/hooks/segment.py +++ b/airflow/providers/segment/hooks/segment.py @@ -79,8 +79,6 @@ def get_conn(self) -> analytics: return analytics def on_error(self, error: str, items: str) -> None: - """ - Handles error callbacks when using Segment with segment_debug_mode set to True - """ + """Handles error callbacks when using Segment with segment_debug_mode set to True""" self.log.error('Encountered Segment error: %s with ' 'items: %s', error, items) raise AirflowException('Segment error: {}'.format(error)) diff --git a/airflow/providers/sendgrid/utils/emailer.py b/airflow/providers/sendgrid/utils/emailer.py index 20a22478da486..174a3a1ac51ca 100644 --- a/airflow/providers/sendgrid/utils/emailer.py +++ b/airflow/providers/sendgrid/utils/emailer.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Airflow module for emailer using sendgrid -""" +"""Airflow module for emailer using sendgrid""" import base64 import logging diff --git a/airflow/providers/sftp/hooks/sftp.py b/airflow/providers/sftp/hooks/sftp.py index 8d497cc385dfc..7f298a33b8c8c 100644 --- a/airflow/providers/sftp/hooks/sftp.py +++ b/airflow/providers/sftp/hooks/sftp.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains SFTP hook. -""" +"""This module contains SFTP hook.""" import datetime import stat from typing import Dict, List, Optional, Tuple @@ -94,9 +92,7 @@ def __init__(self, ftp_conn_id: str = 'sftp_default', *args, **kwargs) -> None: self.key_file = extra_options.get('private_key') def get_conn(self) -> pysftp.Connection: - """ - Returns an SFTP connection object - """ + """Returns an SFTP connection object""" if self.conn is None: cnopts = pysftp.CnOpts() if self.no_host_key_check: @@ -119,9 +115,7 @@ def get_conn(self) -> pysftp.Connection: return self.conn def close_conn(self) -> None: - """ - Closes the connection - """ + """Closes the connection""" if self.conn is not None: self.conn.close() self.conn = None diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py index 23652f1bc996c..086cbba40f6fd 100644 --- a/airflow/providers/sftp/operators/sftp.py +++ b/airflow/providers/sftp/operators/sftp.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains SFTP operator. 
-""" +"""This module contains SFTP operator.""" import os from pathlib import Path from typing import Any @@ -29,9 +27,7 @@ class SFTPOperation: - """ - Operation that can be used with SFTP/ - """ + """Operation that can be used with SFTP/""" PUT = 'put' GET = 'get' diff --git a/airflow/providers/sftp/sensors/sftp.py b/airflow/providers/sftp/sensors/sftp.py index 60ec0451e182e..96cc028627783 100644 --- a/airflow/providers/sftp/sensors/sftp.py +++ b/airflow/providers/sftp/sensors/sftp.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module contains SFTP sensor. -""" +"""This module contains SFTP sensor.""" from typing import Optional from paramiko import SFTP_NO_SUCH_FILE diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py index ff46327a1c769..2a2a8b7280860 100644 --- a/airflow/providers/slack/hooks/slack_webhook.py +++ b/airflow/providers/slack/hooks/slack_webhook.py @@ -138,9 +138,7 @@ def _build_slack_message(self) -> str: return json.dumps(cmd) def execute(self) -> None: - """ - Remote Popen (actually execute the slack webhook call) - """ + """Remote Popen (actually execute the slack webhook call)""" proxies = {} if self.proxy: # we only need https proxy for Slack, as the endpoint is https diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py index 79593016e2cf7..7899aa931bfac 100644 --- a/airflow/providers/slack/operators/slack_webhook.py +++ b/airflow/providers/slack/operators/slack_webhook.py @@ -103,9 +103,7 @@ def __init__( self.hook: Optional[SlackWebhookHook] = None def execute(self, context: Dict[str, Any]) -> None: - """ - Call the SlackWebhookHook to post the provided Slack message - """ + """Call the SlackWebhookHook to post the provided Slack message""" self.hook = SlackWebhookHook( self.http_conn_id, self.webhook_token, diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py index 55e8f26995b04..a0bbedf7165e8 100644 --- a/airflow/providers/snowflake/hooks/snowflake.py +++ b/airflow/providers/snowflake/hooks/snowflake.py @@ -103,9 +103,7 @@ def _get_conn_params(self) -> Dict[str, Optional[str]]: return conn_config def get_uri(self) -> str: - """ - Override DbApiHook get_uri method for get_sqlalchemy_engine() - """ + """Override DbApiHook get_uri method for get_sqlalchemy_engine()""" conn_config = self._get_conn_params() uri = ( 'snowflake://{user}:{password}@{account}/{database}/{schema}' @@ -114,9 +112,7 @@ def get_uri(self) -> str: return uri.format(**conn_config) def get_conn(self) -> SnowflakeConnection: - """ - Returns a snowflake.connection object - """ + """Returns a snowflake.connection object""" conn_config = self._get_conn_params() conn = connector.connect(**conn_config) return conn diff --git a/airflow/providers/snowflake/operators/snowflake.py b/airflow/providers/snowflake/operators/snowflake.py index 15a4b024a14c5..f58b3744d91a4 100644 --- a/airflow/providers/snowflake/operators/snowflake.py +++ b/airflow/providers/snowflake/operators/snowflake.py @@ -105,9 +105,7 @@ def get_hook(self) -> SnowflakeHook: ) def execute(self, context: Any) -> None: - """ - Run query on snowflake - """ + """Run query on snowflake""" self.log.info('Executing: %s', self.sql) hook = self.get_hook() hook.run(self.sql, autocommit=self.autocommit, parameters=self.parameters) diff --git 
a/airflow/providers/snowflake/transfers/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py index dffe20eddd9c8..758bec2cb1447 100644 --- a/airflow/providers/snowflake/transfers/s3_to_snowflake.py +++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -This module contains AWS S3 to Snowflake operator. -""" +"""This module contains AWS S3 to Snowflake operator.""" from typing import Any, Optional from airflow.models import BaseOperator diff --git a/airflow/providers/sqlite/hooks/sqlite.py b/airflow/providers/sqlite/hooks/sqlite.py index a4ac44362090f..67b927304781f 100644 --- a/airflow/providers/sqlite/hooks/sqlite.py +++ b/airflow/providers/sqlite/hooks/sqlite.py @@ -22,17 +22,13 @@ class SqliteHook(DbApiHook): - """ - Interact with SQLite. - """ + """Interact with SQLite.""" conn_name_attr = 'sqlite_conn_id' default_conn_name = 'sqlite_default' def get_conn(self) -> sqlite3.dbapi2.Connection: - """ - Returns a sqlite connection object - """ + """Returns a sqlite connection object""" conn_id = getattr(self, self.conn_name_attr) airflow_conn = self.get_connection(conn_id) conn = sqlite3.connect(airflow_conn.host) diff --git a/airflow/providers/ssh/operators/ssh.py b/airflow/providers/ssh/operators/ssh.py index 333e0fbf5e71f..181fee8c1952d 100644 --- a/airflow/providers/ssh/operators/ssh.py +++ b/airflow/providers/ssh/operators/ssh.py @@ -175,8 +175,6 @@ def execute(self, context) -> Union[bytes, str, bool]: return True def tunnel(self) -> None: - """ - Get ssh tunnel - """ + """Get SSH tunnel""" ssh_client = self.ssh_hook.get_conn() # type: ignore[union-attr] ssh_client.get_transport() diff --git a/airflow/providers/vertica/hooks/vertica.py b/airflow/providers/vertica/hooks/vertica.py index deff220a182fe..acb86a5f6f456 100644 --- a/airflow/providers/vertica/hooks/vertica.py +++ b/airflow/providers/vertica/hooks/vertica.py @@ -23,18 +23,14 @@ class VerticaHook(DbApiHook): - """ - Interact with Vertica. - """ + """Interact with Vertica.""" conn_name_attr = 'vertica_conn_id' default_conn_name = 'vertica_default' supports_autocommit = True def get_conn(self) -> connect: - """ - Return verticaql connection object - """ + """Return Vertica connection object""" conn = self.get_connection(self.vertica_conn_id) # type: ignore # pylint: disable=no-member conn_config = { "user": conn.login, diff --git a/airflow/providers/yandex/hooks/yandex.py b/airflow/providers/yandex/hooks/yandex.py index d2aa6958dc2b5..5bb71a706879f 100644 --- a/airflow/providers/yandex/hooks/yandex.py +++ b/airflow/providers/yandex/hooks/yandex.py @@ -67,9 +67,7 @@ def _get_credentials(self) -> Dict[str, Any]: return {'token': oauth_token} def _get_field(self, field_name: str, default: Any = None) -> Any: - """ - Fetches a field from extras, and returns it.
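A short sketch of how the SqliteHook above is typically used; as the hunk shows, get_conn() opens sqlite3 against the host field of the resolved Airflow connection (the query here is illustrative):

from airflow.providers.sqlite.hooks.sqlite import SqliteHook

hook = SqliteHook()  # resolves the 'sqlite_default' connection by default
conn = hook.get_conn()
print(conn.execute('SELECT 1').fetchone())
conn.close()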
- """ + """Fetches a field from extras, and returns it.""" long_f = f'extra__yandexcloud__{field_name}' if hasattr(self, 'extras') and long_f in self.extras: return self.extras[long_f] diff --git a/airflow/providers/zendesk/hooks/zendesk.py b/airflow/providers/zendesk/hooks/zendesk.py index 1dada9910c3ed..bc8e921188a22 100644 --- a/airflow/providers/zendesk/hooks/zendesk.py +++ b/airflow/providers/zendesk/hooks/zendesk.py @@ -25,9 +25,7 @@ class ZendeskHook(BaseHook): - """ - A hook to talk to Zendesk - """ + """A hook to talk to Zendesk""" def __init__(self, zendesk_conn_id: str) -> None: super().__init__() diff --git a/airflow/secrets/base_secrets.py b/airflow/secrets/base_secrets.py index c5c8a4ae853b9..3d2f2576194ab 100644 --- a/airflow/secrets/base_secrets.py +++ b/airflow/secrets/base_secrets.py @@ -23,9 +23,7 @@ class BaseSecretsBackend(ABC): - """ - Abstract base class to retrieve secrets given a conn_id and construct a Connection object - """ + """Abstract base class to retrieve secrets given a conn_id and construct a Connection object""" def __init__(self, **kwargs): pass diff --git a/airflow/secrets/environment_variables.py b/airflow/secrets/environment_variables.py index 50345f1d41b44..b47760e358afd 100644 --- a/airflow/secrets/environment_variables.py +++ b/airflow/secrets/environment_variables.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Objects relating to sourcing connections from environment variables -""" +"""Objects relating to sourcing connections from environment variables""" import os from typing import Optional @@ -29,9 +27,7 @@ class EnvironmentVariablesBackend(BaseSecretsBackend): - """ - Retrieves Connection object from environment variable. - """ + """Retrieves Connection object from environment variable.""" # pylint: disable=missing-docstring def get_conn_uri(self, conn_id: str) -> Optional[str]: diff --git a/airflow/secrets/local_filesystem.py b/airflow/secrets/local_filesystem.py index ebcfae5bc55e0..2b249cc7968bd 100644 --- a/airflow/secrets/local_filesystem.py +++ b/airflow/secrets/local_filesystem.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Objects relating to retrieving connections and variables from local file -""" +"""Objects relating to retrieving connections and variables from local file""" import json import logging import os @@ -176,9 +174,7 @@ def _parse_secret_file(file_path: str) -> Dict[str, Any]: def _create_connection(conn_id: str, value: Any): - """ - Creates a connection based on a URL or JSON object. - """ + """Creates a connection based on a URL or JSON object.""" from airflow.models.connection import Connection if isinstance(value, str): @@ -237,9 +233,7 @@ def load_variables(file_path: str) -> Dict[str, str]: def load_connections(file_path) -> Dict[str, List[Any]]: - """ - This function is deprecated. Please use `airflow.secrets.local_filesystem.load_connections_dict`.", - """ + """This function is deprecated. Please use `airflow.secrets.local_filesystem.load_connections_dict`.",""" warnings.warn( "This function is deprecated. 
Please use `airflow.secrets.local_filesystem.load_connections_dict`.", DeprecationWarning, stacklevel=2 diff --git a/airflow/secrets/metastore.py b/airflow/secrets/metastore.py index 6d05005cfdda2..497bc4476d66b 100644 --- a/airflow/secrets/metastore.py +++ b/airflow/secrets/metastore.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Objects relating to sourcing connections from metastore database -""" +"""Objects relating to sourcing connections from metastore database""" from typing import TYPE_CHECKING, List @@ -29,9 +27,7 @@ class MetastoreBackend(BaseSecretsBackend): - """ - Retrieves Connection object from airflow metastore database. - """ + """Retrieves Connection object from airflow metastore database.""" # pylint: disable=missing-docstring @provide_session diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py index 10a5084186f03..d98fb97ea9b9a 100644 --- a/airflow/sensors/base_sensor_operator.py +++ b/airflow/sensors/base_sensor_operator.py @@ -210,9 +210,7 @@ def execute(self, context: Dict) -> Any: self.log.info("Success criteria met. Exiting.") def _get_next_poke_interval(self, started_at, try_number): - """ - Using the similar logic which is used for exponential backoff retry delay for operators. - """ + """Uses the same logic that is used for the exponential backoff retry delay for operators.""" if self.exponential_backoff: min_backoff = int(self.poke_interval * (2 ** (try_number - 2))) current_time = timezone.utcnow() diff --git a/airflow/sensors/external_task_sensor.py b/airflow/sensors/external_task_sensor.py index 89c3e281e5ab7..c32939f3d167b 100644 --- a/airflow/sensors/external_task_sensor.py +++ b/airflow/sensors/external_task_sensor.py @@ -81,9 +81,7 @@ class ExternalTaskSensor(BaseSensorOperator): @property def operator_extra_links(self): - """ - Return operator extra links - """ + """Return operator extra links""" return [ExternalTaskSensorLink()] @apply_defaults diff --git a/airflow/sensors/smart_sensor_operator.py b/airflow/sensors/smart_sensor_operator.py index 2293d02f33418..60257da442777 100644 --- a/airflow/sensors/smart_sensor_operator.py +++ b/airflow/sensors/smart_sensor_operator.py @@ -113,9 +113,7 @@ def create_new_task_handler(): return handler def _get_sensor_logger(self, si): - """ - Return logger for a sensor instance object. - """ + """Return logger for a sensor instance object.""" # The created log_id is used inside of smart sensor as the key to fetch # the corresponding in memory log handler. si.raw = False # Otherwise set_context will fail @@ -138,9 +136,7 @@ def _get_sensor_logger(self, si): return logger def close_sensor_logger(self): - """ - Close log handler for a sensor work. - """ + """Close log handler for a sensor work.""" for handler in self.log.handlers: try: handler.close() @@ -149,16 +145,12 @@ def close_sensor_logger(self): @property def ti_key(self): - """ - Key for the task instance that maps to the sensor work. - """ + """Key for the task instance that maps to the sensor work.""" return self.dag_id, self.task_id, self.execution_date @property def cache_key(self): - """ - Key used to query in smart sensor for cached sensor work.
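The exponential backoff hunk above only shows the lower bound; a standalone sketch of that floor (the real method goes on to derive the final interval from it):

def poke_backoff_floor(poke_interval: float, try_number: int) -> int:
    # Mirrors min_backoff in _get_next_poke_interval(): the floor doubles
    # with each try when exponential_backoff is enabled.
    return int(poke_interval * (2 ** (try_number - 2)))

for attempt in range(2, 6):
    print(attempt, poke_backoff_floor(60, attempt))  # 60, 120, 240, 480 seconds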
- """ + """Key used to query in smart sensor for cached sensor work.""" return self.operator, self.encoded_poke_context @@ -187,15 +179,11 @@ def set_state(self, state): self.last_poke_time = timezone.utcnow() def clear_state(self): - """ - Clear state for cached poke work. - """ + """Clear state for cached poke work.""" self.state = None def set_to_flush(self): - """ - Mark this poke work to be popped from cached dict after current loop. - """ + """Mark this poke work to be popped from cached dict after current loop.""" self.to_flush = True def is_expired(self): @@ -263,9 +251,7 @@ def should_fail_current_run(self): @property def exception_info(self): - """ - :return: exception msg. - """ + """:return: exception msg.""" return self._exception_info @property @@ -671,10 +657,7 @@ def _execute_sensor_work(self, sensor_work): self._handle_poke_exception(sensor_work) def flush_cached_sensor_poke_results(self): - """ - Flush outdated cached sensor states saved in previous loop. - - """ + """Flush outdated cached sensor states saved in previous loop.""" for key, cached_work in self.cached_dedup_works.items(): if cached_work.is_expired(): self.cached_dedup_works.pop(key, None) diff --git a/airflow/sentry.py b/airflow/sentry.py index 23a77030e4468..253534383e941 100644 --- a/airflow/sentry.py +++ b/airflow/sentry.py @@ -28,33 +28,23 @@ class DummySentry: - """ - Blank class for Sentry. - """ + """Blank class for Sentry.""" @classmethod def add_tagging(cls, task_instance): - """ - Blank function for tagging. - """ + """Blank function for tagging.""" @classmethod def add_breadcrumbs(cls, task_instance, session=None): - """ - Blank function for breadcrumbs. - """ + """Blank function for breadcrumbs.""" @classmethod def enrich_errors(cls, run): - """ - Blank function for formatting a TaskInstance._run_raw_task. - """ + """Blank function for formatting a TaskInstance._run_raw_task.""" return run def flush(self): - """ - Blank function for flushing errors. - """ + """Blank function for flushing errors.""" Sentry: DummySentry = DummySentry() @@ -66,9 +56,7 @@ def flush(self): from sentry_sdk.integrations.logging import ignore_logger class ConfiguredSentry(DummySentry): - """ - Configure Sentry SDK. - """ + """Configure Sentry SDK.""" SCOPE_TAGS = frozenset( ("task_id", "dag_id", "execution_date", "operator", "try_number") @@ -81,9 +69,7 @@ class ConfiguredSentry(DummySentry): ) def __init__(self): - """ - Initialize the Sentry SDK. - """ + """Initialize the Sentry SDK.""" ignore_logger("airflow.task") ignore_logger("airflow.jobs.backfill_job.BackfillJob") executor_name = conf.get("core", "EXECUTOR") @@ -124,9 +110,7 @@ def __init__(self): sentry_sdk.init(integrations=integrations, **sentry_config_opts) def add_tagging(self, task_instance): - """ - Function to add tagging for a task_instance. - """ + """Function to add tagging for a task_instance.""" task = task_instance.task with sentry_sdk.configure_scope() as scope: @@ -138,9 +122,7 @@ def add_tagging(self, task_instance): @provide_session def add_breadcrumbs(self, task_instance, session=None): - """ - Function to add breadcrumbs inside of a task_instance. 
- """ + """Function to add breadcrumbs inside of a task_instance.""" if session is None: return execution_date = task_instance.execution_date @@ -161,9 +143,7 @@ def add_breadcrumbs(self, task_instance, session=None): sentry_sdk.add_breadcrumb(category="completed_tasks", data=data, level="info") def enrich_errors(self, func): - """ - Wrap TaskInstance._run_raw_task to support task specific tags and breadcrumbs. - """ + """Wrap TaskInstance._run_raw_task to support task specific tags and breadcrumbs.""" @wraps(func) def wrapper(task_instance, *args, session=None, **kwargs): diff --git a/airflow/serialization/json_schema.py b/airflow/serialization/json_schema.py index 58fe7557d2c24..f697b5e5b870b 100644 --- a/airflow/serialization/json_schema.py +++ b/airflow/serialization/json_schema.py @@ -50,9 +50,7 @@ def iter_errors(self, instance) -> Iterable[jsonschema.exceptions.ValidationErro def load_dag_schema_dict() -> dict: - """ - Load & return Json Schema for DAG as Python dict - """ + """Load & return Json Schema for DAG as Python dict""" schema_file_name = 'schema.json' schema_file = pkgutil.get_data(__name__, schema_file_name) @@ -64,9 +62,7 @@ def load_dag_schema_dict() -> dict: def load_dag_schema() -> Validator: - """ - Load & Validate Json Schema for DAG - """ + """Load & Validate Json Schema for DAG""" schema = load_dag_schema_dict() jsonschema.Draft7Validator.check_schema(schema) return jsonschema.Draft7Validator(schema) diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 8d1eb349045b5..0a54a8d827792 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -83,14 +83,12 @@ class BaseSerialization: @classmethod def to_json(cls, var: Union[DAG, BaseOperator, dict, list, set, tuple]) -> str: - """Stringifies DAGs and operators contained by var and returns a JSON string of var. - """ + """Stringifies DAGs and operators contained by var and returns a JSON string of var.""" return json.dumps(cls.to_dict(var), ensure_ascii=True) @classmethod def to_dict(cls, var: Union[DAG, BaseOperator, dict, list, set, tuple]) -> dict: - """Stringifies DAGs and operators contained by var and returns a dict of var. - """ + """Stringifies DAGs and operators contained by var and returns a dict of var.""" # Don't call on this class directly - only SerializedDAG or # SerializedBaseOperator should be used as the "entrypoint" raise NotImplementedError() @@ -351,8 +349,7 @@ def task_type(self, task_type: str): @classmethod def serialize_operator(cls, op: BaseOperator) -> dict: - """Serializes operator into a JSON object. - """ + """Serializes operator into a JSON object.""" serialize_op = cls.serialize_to_json(op, cls._decorated_fields) serialize_op['_task_type'] = op.__class__.__name__ serialize_op['_task_module'] = op.__class__.__module__ @@ -372,8 +369,7 @@ def serialize_operator(cls, op: BaseOperator) -> dict: @classmethod def deserialize_operator(cls, encoded_op: Dict[str, Any]) -> BaseOperator: - """Deserializes an operator from a JSON object. - """ + """Deserializes an operator from a JSON object.""" from airflow import plugins_manager plugins_manager.initialize_extra_operators_links_plugins() @@ -577,8 +573,7 @@ def __get_constructor_defaults(): # pylint: disable=no-method-argument @classmethod def serialize_dag(cls, dag: DAG) -> dict: - """Serializes a DAG into a JSON object. 
- """ + """Serializes a DAG into a JSON object.""" serialize_dag = cls.serialize_to_json(dag, cls._decorated_fields) serialize_dag["tasks"] = [cls._serialize(task) for _, task in dag.task_dict.items()] @@ -587,8 +582,7 @@ def serialize_dag(cls, dag: DAG) -> dict: @classmethod def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': - """Deserializes a DAG from a JSON object. - """ + """Deserializes a DAG from a JSON object.""" dag = SerializedDAG(dag_id=encoded_dag['_dag_id']) for k, v in encoded_dag.items(): @@ -652,8 +646,7 @@ def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': @classmethod def to_dict(cls, var: Any) -> dict: - """Stringifies DAGs and operators contained by var and returns a dict of var. - """ + """Stringifies DAGs and operators contained by var and returns a dict of var.""" json_dict = { "__version": cls.SERIALIZER_VERSION, "dag": cls.serialize_dag(var) @@ -673,15 +666,11 @@ def from_dict(cls, serialized_obj: dict) -> 'SerializedDAG': class SerializedTaskGroup(TaskGroup, BaseSerialization): - """ - A JSON serializable representation of TaskGroup. - """ + """A JSON serializable representation of TaskGroup.""" @classmethod def serialize_task_group(cls, task_group: TaskGroup) -> Optional[Union[Dict[str, Any]]]: - """ - Serializes TaskGroup into a JSON object. - """ + """Serializes TaskGroup into a JSON object.""" if not task_group: return None @@ -713,9 +702,7 @@ def deserialize_task_group( parent_group: Optional[TaskGroup], task_dict: Dict[str, BaseOperator] ) -> Optional[TaskGroup]: - """ - Deserializes a TaskGroup from a JSON object. - """ + """Deserializes a TaskGroup from a JSON object.""" if not encoded_group: return None diff --git a/airflow/settings.py b/airflow/settings.py index 0fc1855e27375..d77069303f7d5 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -288,9 +288,7 @@ def configure_action_logging(): def prepare_syspath(): - """ - Ensures that certain subfolders of AIRFLOW_HOME are on the classpath - """ + """Ensures that certain subfolders of AIRFLOW_HOME are on the classpath""" if DAGS_FOLDER not in sys.path: sys.path.append(DAGS_FOLDER) diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py index d2901ba1c7826..a05f702e47be7 100644 --- a/airflow/task/task_runner/base_task_runner.py +++ b/airflow/task/task_runner/base_task_runner.py @@ -141,9 +141,7 @@ def run_command(self, run_with=None): return proc def start(self): - """ - Start running the task instance in a subprocess. - """ + """Start running the task instance in a subprocess.""" raise NotImplementedError() def return_code(self): @@ -155,15 +153,11 @@ def return_code(self): raise NotImplementedError() def terminate(self): - """ - Kill the running task instance. - """ + """Kill the running task instance.""" raise NotImplementedError() def on_finish(self): - """ - A callback that should be called when this is done running. - """ + """A callback that should be called when this is done running.""" if self._cfg_path and os.path.isfile(self._cfg_path): if self.run_as_user: subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) diff --git a/airflow/task/task_runner/standard_task_runner.py b/airflow/task/task_runner/standard_task_runner.py index 2ecbae0a71972..61ea43a0f73e6 100644 --- a/airflow/task/task_runner/standard_task_runner.py +++ b/airflow/task/task_runner/standard_task_runner.py @@ -27,9 +27,7 @@ class StandardTaskRunner(BaseTaskRunner): - """ - Standard runner for all tasks. 
- """ + """Standard runner for all tasks.""" def __init__(self, local_task_job): super().__init__(local_task_job) diff --git a/airflow/ti_deps/deps/__init__.py b/airflow/ti_deps/deps/__init__.py index 2a3aed1584101..9dd173958bba0 100644 --- a/airflow/ti_deps/deps/__init__.py +++ b/airflow/ti_deps/deps/__init__.py @@ -15,6 +15,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Implementation of specific dependencies for tasks. -""" +"""Implementation of specific dependencies for tasks.""" diff --git a/airflow/ti_deps/deps/dag_ti_slots_available_dep.py b/airflow/ti_deps/deps/dag_ti_slots_available_dep.py index 65c9dd4241211..57bc92af42964 100644 --- a/airflow/ti_deps/deps/dag_ti_slots_available_dep.py +++ b/airflow/ti_deps/deps/dag_ti_slots_available_dep.py @@ -21,9 +21,7 @@ class DagTISlotsAvailableDep(BaseTIDep): - """ - Determines whether a DAG maximum number of running tasks has been reached. - """ + """Determines whether a DAG maximum number of running tasks has been reached.""" NAME = "Task Instance Slots Available" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/dag_unpaused_dep.py b/airflow/ti_deps/deps/dag_unpaused_dep.py index 6caa349c4802a..bc8cd300c1680 100644 --- a/airflow/ti_deps/deps/dag_unpaused_dep.py +++ b/airflow/ti_deps/deps/dag_unpaused_dep.py @@ -21,9 +21,7 @@ class DagUnpausedDep(BaseTIDep): - """ - Determines whether a task's DAG is not paused. - """ + """Determines whether a task's DAG is not paused.""" NAME = "Dag Not Paused" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/dagrun_exists_dep.py b/airflow/ti_deps/deps/dagrun_exists_dep.py index 70b8bb87592a2..a26c629dae970 100644 --- a/airflow/ti_deps/deps/dagrun_exists_dep.py +++ b/airflow/ti_deps/deps/dagrun_exists_dep.py @@ -22,9 +22,7 @@ class DagrunRunningDep(BaseTIDep): - """ - Determines whether a task's DagRun is in valid state. - """ + """Determines whether a task's DagRun is in valid state.""" NAME = "Dagrun Running" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/dagrun_id_dep.py b/airflow/ti_deps/deps/dagrun_id_dep.py index 3549fa9d514fa..e01b975ba685c 100644 --- a/airflow/ti_deps/deps/dagrun_id_dep.py +++ b/airflow/ti_deps/deps/dagrun_id_dep.py @@ -24,9 +24,7 @@ class DagrunIdDep(BaseTIDep): - """ - Dep for valid DagRun ID to schedule from scheduler - """ + """Dep for valid DagRun ID to schedule from scheduler""" NAME = "Dagrun run_id is not backfill job ID" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/exec_date_after_start_date_dep.py b/airflow/ti_deps/deps/exec_date_after_start_date_dep.py index 8e01ac233d71e..0b6a0030ce11e 100644 --- a/airflow/ti_deps/deps/exec_date_after_start_date_dep.py +++ b/airflow/ti_deps/deps/exec_date_after_start_date_dep.py @@ -21,9 +21,7 @@ class ExecDateAfterStartDateDep(BaseTIDep): - """ - Determines whether a task's execution date is after start date. - """ + """Determines whether a task's execution date is after start date.""" NAME = "Execution Date" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/not_in_retry_period_dep.py b/airflow/ti_deps/deps/not_in_retry_period_dep.py index 88812616892c5..5cfab757c8ed6 100644 --- a/airflow/ti_deps/deps/not_in_retry_period_dep.py +++ b/airflow/ti_deps/deps/not_in_retry_period_dep.py @@ -23,9 +23,7 @@ class NotInRetryPeriodDep(BaseTIDep): - """ - Determines whether a task is not in retry period. 
- """ + """Determines whether a task is not in retry period.""" NAME = "Not In Retry Period" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/pool_slots_available_dep.py b/airflow/ti_deps/deps/pool_slots_available_dep.py index d4307d9d748e1..2c58013f33304 100644 --- a/airflow/ti_deps/deps/pool_slots_available_dep.py +++ b/airflow/ti_deps/deps/pool_slots_available_dep.py @@ -24,9 +24,7 @@ class PoolSlotsAvailableDep(BaseTIDep): - """ - Dep for pool slots availability. - """ + """Dep for pool slots availability.""" NAME = "Pool Slots Available" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/ready_to_reschedule.py b/airflow/ti_deps/deps/ready_to_reschedule.py index 6eb538f4b85c4..4b8144844f6dc 100644 --- a/airflow/ti_deps/deps/ready_to_reschedule.py +++ b/airflow/ti_deps/deps/ready_to_reschedule.py @@ -24,9 +24,7 @@ class ReadyToRescheduleDep(BaseTIDep): - """ - Determines whether a task is ready to be rescheduled. - """ + """Determines whether a task is ready to be rescheduled.""" NAME = "Ready To Reschedule" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/runnable_exec_date_dep.py b/airflow/ti_deps/deps/runnable_exec_date_dep.py index 47b03cd4a88b2..add9846b33771 100644 --- a/airflow/ti_deps/deps/runnable_exec_date_dep.py +++ b/airflow/ti_deps/deps/runnable_exec_date_dep.py @@ -22,9 +22,7 @@ class RunnableExecDateDep(BaseTIDep): - """ - Determines whether a task's execution date is valid. - """ + """Determines whether a task's execution date is valid.""" NAME = "Execution Date" IGNOREABLE = True diff --git a/airflow/ti_deps/deps/task_concurrency_dep.py b/airflow/ti_deps/deps/task_concurrency_dep.py index 1bf65b529accb..d1456f9112ce8 100644 --- a/airflow/ti_deps/deps/task_concurrency_dep.py +++ b/airflow/ti_deps/deps/task_concurrency_dep.py @@ -21,9 +21,7 @@ class TaskConcurrencyDep(BaseTIDep): - """ - This restricts the number of running task instances for a particular task. - """ + """This restricts the number of running task instances for a particular task.""" NAME = "Task Concurrency" IGNOREABLE = True diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index 6e0ea25826cd6..b37f2c96521d1 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -Utilities module for cli -""" +"""Utilities module for cli""" import functools import getpass @@ -251,9 +249,7 @@ def sigquit_handler(sig, frame): # pylint: disable=unused-argument class ColorMode: - """ - Coloring modes. If `auto` is then automatically detected. - """ + """Coloring modes. 
If `auto`, the mode is detected automatically.""" ON = "on" OFF = "off" @@ -261,9 +257,7 @@ def should_use_colors(args) -> bool: - """ - Processes arguments and decides whether to enable color in output - """ + """Processes arguments and decides whether to enable color in output""" if args.color == ColorMode.ON: return True if args.color == ColorMode.OFF: diff --git a/airflow/utils/code_utils.py b/airflow/utils/code_utils.py index f62383817d3c7..77cfa42bf6116 100644 --- a/airflow/utils/code_utils.py +++ b/airflow/utils/code_utils.py @@ -25,9 +25,7 @@ def get_python_source(x: Any) -> Optional[str]: - """ - Helper function to get Python source (or not), preventing exceptions - """ + """Helper function to get Python source (or not), preventing exceptions""" if isinstance(x, str): return x diff --git a/airflow/utils/compression.py b/airflow/utils/compression.py index 7cddd296e05a3..dff8c8d3ed7ab 100644 --- a/airflow/utils/compression.py +++ b/airflow/utils/compression.py @@ -23,9 +23,7 @@ def uncompress_file(input_file_name, file_extension, dest_dir): - """ - Uncompress gz and bz2 files - """ + """Uncompress gz and bz2 files""" if file_extension.lower() not in ('.gz', '.bz2'): raise NotImplementedError("Received {} format. Only gz and bz2 " "files can currently be uncompressed." diff --git a/airflow/utils/dag_cycle_tester.py b/airflow/utils/dag_cycle_tester.py index 87ba6329ba70a..92501d66e4e6c 100644 --- a/airflow/utils/dag_cycle_tester.py +++ b/airflow/utils/dag_cycle_tester.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -DAG Cycle tester -""" +"""DAG Cycle tester""" from collections import defaultdict, deque from airflow.exceptions import AirflowDagCycleException @@ -37,9 +35,7 @@ def test_cycle(dag): task_dict = dag.task_dict def _check_adjacent_tasks(task_id, current_task): - """ - Returns first untraversed child task, else None if all tasks traversed. - """ + """Returns first untraversed child task, else None if all tasks traversed.""" for adjacent_task in current_task.get_direct_relative_ids(): if visited[adjacent_task] == CYCLE_IN_PROGRESS: msg = f"Cycle detected in DAG. Faulty task: {task_id}" diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index e60b387e8fc29..3718b845130d7 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -53,37 +53,27 @@ class AbstractDagFileProcessorProcess(metaclass=ABCMeta): - """ - Processes a DAG file. See SchedulerJob.process_file() for more details. - """ + """Processes a DAG file. See SchedulerJob.process_file() for more details.""" @abstractmethod def start(self) -> None: - """ - Launch the process to process the file - """ + """Launch the process to process the file""" raise NotImplementedError() @abstractmethod def terminate(self, sigkill: bool = False): - """ - Terminate (and then kill) the process launched to process the file - """ + """Terminate (and then kill) the process launched to process the file""" raise NotImplementedError() @abstractmethod def kill(self) -> None: - """ - Kill the process launched to process the file, and ensure consistent state.
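A condensed sketch of the color-mode decision shown above; the `auto` branch is an assumption here, since the hunk is truncated before it:

import sys

ON, OFF, AUTO = 'on', 'off', 'auto'

def should_use_colors(color: str) -> bool:
    # Explicit on/off settings win; otherwise fall back to tty detection.
    if color == ON:
        return True
    if color == OFF:
        return False
    return sys.stdout.isatty()

print(should_use_colors(AUTO))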
- """ + """Kill the process launched to process the file, and ensure consistent state.""" raise NotImplementedError() @property @abstractmethod def pid(self) -> int: - """ - :return: the PID of the process launched to process the given file - """ + """:return: the PID of the process launched to process the given file""" raise NotImplementedError() @property @@ -138,9 +128,7 @@ def file_path(self) -> str: @property @abstractmethod def waitable_handle(self): - """ - A "waitable" handle that can be passed to ``multiprocessing.connection.wait()`` - """ + """A "waitable" handle that can be passed to ``multiprocessing.connection.wait()``""" raise NotImplementedError() @@ -235,9 +223,7 @@ def __init__( self._last_parsing_stat_received_at: float = time.monotonic() def start(self) -> None: - """ - Launch DagFileProcessorManager processor and start DAG parsing loop in manager. - """ + """Launch DagFileProcessorManager processor and start DAG parsing loop in manager.""" mp_start_method = self._get_multiprocessing_start_method() context = multiprocessing.get_context(mp_start_method) self._last_parsing_stat_received_at = time.monotonic() @@ -380,9 +366,7 @@ def _run_processor_manager( processor_manager.start() def heartbeat(self) -> None: - """ - Check if the DagFileProcessorManager process is alive, and process any pending messages - """ + """Check if the DagFileProcessorManager process is alive, and process any pending messages""" if not self._parent_signal_conn: raise ValueError("Process not started.") # Receive any pending messages before checking if the process has exited. @@ -404,9 +388,7 @@ def _process_message(self, message): raise RuntimeError(f"Unexpected message received of type {type(message).__name__}") def _heartbeat_manager(self): - """ - Heartbeat DAG file processor and restart it if we are not done. - """ + """Heartbeat DAG file processor and restart it if we are not done.""" if not self._parent_signal_conn: raise ValueError("Process not started.") if self._process and not self._process.is_alive(): @@ -431,25 +413,19 @@ def _heartbeat_manager(self): self.start() def _sync_metadata(self, stat): - """ - Sync metadata from stat queue and only keep the latest stat. - """ + """Sync metadata from stat queue and only keep the latest stat.""" self._done = stat.done self._all_files_processed = stat.all_files_processed self._last_parsing_stat_received_at = time.monotonic() @property def done(self) -> bool: - """ - Has DagFileProcessorManager ended? - """ + """Has DagFileProcessorManager ended?""" return self._done @property def all_files_processed(self): - """ - Have all files been processed at least once? - """ + """Have all files been processed at least once?""" return self._all_files_processed def terminate(self): @@ -585,16 +561,12 @@ def __init__(self, } def register_exit_signals(self): - """ - Register signals that stop child processes - """ + """Register signals that stop child processes""" signal.signal(signal.SIGINT, self._exit_gracefully) signal.signal(signal.SIGTERM, self._exit_gracefully) def _exit_gracefully(self, signum, frame): # pylint: disable=unused-argument - """ - Helper method to clean up DAG file processors to avoid leaving orphan processes. 
- """ + """Helper method to clean up DAG file processors to avoid leaving orphan processes.""" self.log.info("Exiting gracefully upon receiving signal %s", signum) self.log.debug("Current Stacktrace is: %s", '\n'.join(map(str, inspect.stack()))) self.terminate() @@ -745,9 +717,7 @@ def _add_callback_to_queue(self, request: CallbackRequest): self._file_path_queue.insert(0, request.full_filepath) def _refresh_dag_dir(self): - """ - Refresh file paths from dag dir if we haven't done it for too long. - """ + """Refresh file paths from dag dir if we haven't done it for too long.""" now = timezone.utcnow() elapsed_time_since_refresh = (now - self.last_dag_dir_refresh_time).total_seconds() if elapsed_time_since_refresh > self.dag_dir_list_interval: @@ -775,9 +745,7 @@ def _refresh_dag_dir(self): DagCode.remove_deleted_code(self._file_paths) def _print_stat(self): - """ - Occasionally print out stats about how fast the files are getting processed - """ + """Occasionally print out stats about how fast the files are getting processed""" if 0 < self.print_stats_interval < ( timezone.utcnow() - self.last_stat_print_time).total_seconds(): if self._file_paths: @@ -986,9 +954,7 @@ def set_file_paths(self, new_file_paths): self._processors = filtered_processors def wait_until_finished(self): - """ - Sleeps until all the processors are done. - """ + """Sleeps until all the processors are done.""" for processor in self._processors.values(): while not processor.done: time.sleep(0.1) @@ -1018,9 +984,7 @@ def _collect_results_from_processor(self, processor) -> None: self._file_stats[processor.file_path] = stat def collect_results(self) -> None: - """ - Collect the result from any finished DAG processors - """ + """Collect the result from any finished DAG processors""" ready = multiprocessing.connection.wait(self.waitables.keys() - [self._signal_conn], timeout=0) for sentinel in ready: @@ -1038,9 +1002,7 @@ def collect_results(self) -> None: len(self._file_path_queue)) def start_new_processes(self): - """ - Start more processors if we have enough slots and files to process - """ + """Start more processors if we have enough slots and files to process""" while self._parallelism - len(self._processors) > 0 and self._file_path_queue: file_path = self._file_path_queue.pop(0) callback_to_execute_for_file = self._callback_to_execute[file_path] @@ -1062,9 +1024,7 @@ def start_new_processes(self): self.waitables[processor.waitable_handle] = processor def prepare_file_path_queue(self): - """ - Generate more file paths to process. Result are saved in _file_path_queue. - """ + """Generate more file paths to process. Result are saved in _file_path_queue.""" self._parsing_start_time = timezone.utcnow() # If the file path is already being processed, or if a file was # processed recently, wait until the next batch @@ -1152,9 +1112,7 @@ def _find_zombies(self, session): Stats.incr('zombies_killed') def _kill_timed_out_processors(self): - """ - Kill any file processors that timeout to defend against process hangs. - """ + """Kill any file processors that timeout to defend against process hangs.""" now = timezone.utcnow() for file_path, processor in self._processors.items(): duration = now - processor.start_time @@ -1170,9 +1128,7 @@ def _kill_timed_out_processors(self): processor.kill() def max_runs_reached(self): - """ - :return: whether all file paths have been processed max_runs times - """ + """:return: whether all file paths have been processed max_runs times""" if self._max_runs == -1: # Unlimited runs. 
return False for stat in self._file_stats.values(): diff --git a/airflow/utils/dates.py b/airflow/utils/dates.py index 14c190ab284ab..2377e12cc2b1a 100644 --- a/airflow/utils/dates.py +++ b/airflow/utils/dates.py @@ -229,9 +229,7 @@ def infer_time_unit(time_seconds_arr): def scale_time_units(time_seconds_arr, unit): - """ - Convert an array of time durations in seconds to the specified time unit. - """ + """Convert an array of time durations in seconds to the specified time unit.""" if unit == 'minutes': return list(map(lambda x: x / 60, time_seconds_arr)) elif unit == 'hours': @@ -255,7 +253,5 @@ def days_ago(n, hour=0, minute=0, second=0, microsecond=0): def parse_execution_date(execution_date_str): - """ - Parse execution date string to datetime object. - """ + """Parse execution date string to datetime object.""" return timezone.parse(execution_date_str) diff --git a/airflow/utils/db.py b/airflow/utils/db.py index bcc283ca1a5f3..479ced65cd5ca 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -40,9 +40,7 @@ @provide_session def merge_conn(conn, session=None): - """ - Add new Connection. - """ + """Add new Connection.""" if not session.query(Connection).filter(Connection.conn_id == conn.conn_id).first(): session.add(conn) session.commit() @@ -50,9 +48,7 @@ def merge_conn(conn, session=None): @provide_session def add_default_pool_if_not_exists(session=None): - """ - Add default pool if it does not exist. - """ + """Add default pool if it does not exist.""" if not Pool.get_pool(Pool.DEFAULT_POOL_NAME, session=session): default_pool = Pool( pool=Pool.DEFAULT_POOL_NAME, @@ -66,9 +62,7 @@ def add_default_pool_if_not_exists(session=None): @provide_session def create_default_connections(session=None): - """ - Create default Airflow connections. - """ + """Create default Airflow connections.""" merge_conn( Connection( conn_id="airflow_db", @@ -547,9 +541,7 @@ def create_default_connections(session=None): def initdb(): - """ - Initialize Airflow database. - """ + """Initialize Airflow database.""" upgradedb() if conf.getboolean('core', 'LOAD_DEFAULT_CONNECTIONS'): @@ -606,9 +598,7 @@ def check_migrations(timeout): def upgradedb(): - """ - Upgrade the database. - """ + """Upgrade the database.""" # alembic adds significant import time, so we import it lazily from alembic import command @@ -621,9 +611,7 @@ def upgradedb(): def resetdb(): - """ - Clear out the database - """ + """Clear out the database""" log.info("Dropping tables that exist") connection = settings.engine.connect() diff --git a/airflow/utils/dot_renderer.py b/airflow/utils/dot_renderer.py index 605dd828548db..65e9a4a099fc2 100644 --- a/airflow/utils/dot_renderer.py +++ b/airflow/utils/dot_renderer.py @@ -16,9 +16,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Renderer DAG (tasks and dependencies) to the graphviz object. -""" +"""Renders DAG (tasks and dependencies) to the graphviz object.""" from typing import List, Optional import graphviz diff --git a/airflow/utils/email.py b/airflow/utils/email.py index e8737bb10567d..b40d5d2c95ebe 100644 --- a/airflow/utils/email.py +++ b/airflow/utils/email.py @@ -35,9 +35,7 @@ def send_email(to: Union[List[str], Iterable[str]], subject: str, html_content: str, files=None, dryrun=False, cc=None, bcc=None, mime_subtype='mixed', mime_charset='utf-8', **kwargs): - """ - Send email using backend specified in EMAIL_BACKEND.
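A condensed restatement of scale_time_units() above; the 'days' divisor is an assumption, since the hunk is truncated after the 'hours' branch:

def scale_time_units(time_seconds_arr, unit):
    # Map each supported unit to its divisor; unknown units pass through.
    divisors = {'minutes': 60, 'hours': 60 * 60, 'days': 24 * 60 * 60}
    factor = divisors.get(unit, 1)
    return [x / factor for x in time_seconds_arr]

print(scale_time_units([90, 3600, 86400], 'minutes'))  # [1.5, 60.0, 1440.0]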
- """ + """Send email using backend specified in EMAIL_BACKEND.""" backend = conf.getimport('email', 'EMAIL_BACKEND') to_list = get_email_address_list(to) to_comma_separated = ", ".join(to_list) @@ -150,9 +148,7 @@ def build_mime_message( def send_mime_email(e_from: str, e_to: List[str], mime_msg: MIMEMultipart, dryrun: bool = False) -> None: - """ - Send MIME email. - """ + """Send MIME email.""" smtp_host = conf.get('smtp', 'SMTP_HOST') smtp_port = conf.getint('smtp', 'SMTP_PORT') smtp_starttls = conf.getboolean('smtp', 'SMTP_STARTTLS') @@ -178,9 +174,7 @@ def send_mime_email(e_from: str, e_to: List[str], mime_msg: MIMEMultipart, dryru def get_email_address_list(addresses: Union[str, Iterable[str]]) -> List[str]: - """ - Get list of email addresses. - """ + """Get list of email addresses.""" if isinstance(addresses, str): return _get_email_list_from_str(addresses) diff --git a/airflow/utils/file.py b/airflow/utils/file.py index d659a9d37d915..fa623f2b94fba 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -29,9 +29,7 @@ def TemporaryDirectory(*args, **kwargs): # pylint: disable=invalid-name - """ - This function is deprecated. Please use `tempfile.TemporaryDirectory` - """ + """This function is deprecated. Please use `tempfile.TemporaryDirectory`""" import warnings from tempfile import TemporaryDirectory as TmpDir warnings.warn( diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index f9872c1416b2b..e91e1da2f2cde 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -32,9 +32,7 @@ def validate_key(k, max_length=250): - """ - Validates value used as a key. - """ + """Validates value used as a key.""" if not isinstance(k, str): raise TypeError("The key has to be a string") elif len(k) > max_length: @@ -49,9 +47,7 @@ def validate_key(k, max_length=250): def alchemy_to_dict(obj: Any) -> Optional[Dict]: - """ - Transforms a SQLAlchemy model instance into a dictionary - """ + """Transforms a SQLAlchemy model instance into a dictionary""" if not obj: return None output = {} @@ -64,9 +60,7 @@ def alchemy_to_dict(obj: Any) -> Optional[Dict]: def ask_yesno(question): - """ - Helper to get yes / no answer from user. - """ + """Helper to get yes / no answer from user.""" yes = {'yes', 'y'} no = {'no', 'n'} # pylint: disable=invalid-name @@ -83,9 +77,7 @@ def ask_yesno(question): def is_container(obj): - """ - Test if an object is a container (iterable) but not a string - """ + """Test if an object is a container (iterable) but not a string""" return hasattr(obj, '__iter__') and not isinstance(obj, str) @@ -105,9 +97,7 @@ def as_tuple(obj): def chunks(items: List[T], chunk_size: int) -> Generator[List[T], None, None]: - """ - Yield successive chunks of a given size from a list of items - """ + """Yield successive chunks of a given size from a list of items""" if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): @@ -142,9 +132,7 @@ def as_flattened_list(iterable: Iterable[Iterable[T]]) -> List[T]: def parse_template_string(template_string): - """ - Parses Jinja template string. - """ + """Parses Jinja template string.""" if "{{" in template_string: # jinja mode return None, Template(template_string) else: @@ -174,9 +162,7 @@ def render_log_filename(ti, try_number, filename_template): def convert_camel_to_snake(camel_str): - """ - Converts CamelCase to snake_case. 
- """ + """Converts CamelCase to snake_case.""" return re.sub('(?!^)([A-Z]+)', r'_\1', camel_str).lower() @@ -196,9 +182,7 @@ def merge_dicts(dict1, dict2): def partition(pred: Callable, iterable: Iterable): - """ - Use a predicate to partition entries into false entries and true entries - """ + """Use a predicate to partition entries into false entries and true entries""" iter_1, iter_2 = tee(iterable) return filterfalse(pred, iter_1), filter(pred, iter_2) diff --git a/airflow/utils/json.py b/airflow/utils/json.py index 00cb0d43b5e03..f59d82009049b 100644 --- a/airflow/utils/json.py +++ b/airflow/utils/json.py @@ -30,9 +30,7 @@ class AirflowJsonEncoder(json.JSONEncoder): - """ - Custom Airflow json encoder implementation. - """ + """Custom Airflow json encoder implementation.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -40,9 +38,7 @@ def __init__(self, *args, **kwargs): @staticmethod def _default(obj): - """ - Convert dates and numpy objects in a json serializable format. - """ + """Convert dates and numpy objects in a json serializable format.""" if isinstance(obj, datetime): return obj.strftime('%Y-%m-%dT%H:%M:%SZ') elif isinstance(obj, date): diff --git a/airflow/utils/log/cloudwatch_task_handler.py b/airflow/utils/log/cloudwatch_task_handler.py index f468a899f6c64..1ba2586151698 100644 --- a/airflow/utils/log/cloudwatch_task_handler.py +++ b/airflow/utils/log/cloudwatch_task_handler.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.amazon.aws.log.cloudwatch_task_handler`. -""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.log.cloudwatch_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/log/colored_log.py b/airflow/utils/log/colored_log.py index b844eb61e2016..3d7a8f13e3182 100644 --- a/airflow/utils/log/colored_log.py +++ b/airflow/utils/log/colored_log.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Class responsible for colouring logs based on log level. -""" +"""Class responsible for colouring logs based on log level.""" import re import sys from logging import LogRecord diff --git a/airflow/utils/log/es_task_handler.py b/airflow/utils/log/es_task_handler.py index 2a0a19f9804c9..019fa39de944b 100644 --- a/airflow/utils/log/es_task_handler.py +++ b/airflow/utils/log/es_task_handler.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.elasticsearch.log.es_task_handler`. -""" +"""This module is deprecated. Please use `airflow.providers.elasticsearch.log.es_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/log/gcs_task_handler.py b/airflow/utils/log/gcs_task_handler.py index c142f7fa0d238..63251a4e91f07 100644 --- a/airflow/utils/log/gcs_task_handler.py +++ b/airflow/utils/log/gcs_task_handler.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.google.cloud.log.gcs_task_handler`. -""" +"""This module is deprecated. 
Please use `airflow.providers.google.cloud.log.gcs_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/log/json_formatter.py b/airflow/utils/log/json_formatter.py index b1229c54961e3..4d517deb1542b 100644 --- a/airflow/utils/log/json_formatter.py +++ b/airflow/utils/log/json_formatter.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. -""" -json_formatter module stores all related to ElasticSearch specific logger classes -""" +"""json_formatter module stores everything related to ElasticSearch-specific logger classes""" import json import logging @@ -27,9 +25,7 @@ class JSONFormatter(logging.Formatter): - """ - JSONFormatter instances are used to convert a log record to json. - """ + """JSONFormatter instances are used to convert a log record to json.""" # pylint: disable=too-many-arguments def __init__(self, fmt=None, datefmt=None, style='%', json_fields=None, extras=None): diff --git a/airflow/utils/log/logging_mixin.py b/airflow/utils/log/logging_mixin.py index 3b5f3434ce9ea..624f6b931e937 100644 --- a/airflow/utils/log/logging_mixin.py +++ b/airflow/utils/log/logging_mixin.py @@ -34,18 +34,14 @@ def remove_escape_codes(text: str) -> str: class LoggingMixin: - """ - Convenience super-class to have a logger configured with the class name - """ + """Convenience super-class to have a logger configured with the class name""" def __init__(self, context=None): self._set_context(context) @property def log(self) -> Logger: - """ - Returns a logger. - """ + """Returns a logger.""" try: # FIXME: LoggingMixin should have a default _log field. return self._log # type: ignore @@ -61,9 +57,7 @@ def _set_context(self, context): class ExternalLoggingMixin: - """ - Define a log handler based on an external service (e.g. ELK, StackDriver). - """ + """Define a log handler based on an external service (e.g. ELK, StackDriver).""" @abc.abstractproperty def log_name(self) -> str: @@ -71,16 +65,12 @@ def log_name(self) -> str: @abc.abstractmethod def get_external_log_url(self, task_instance, try_number) -> str: - """ - Return the URL for log visualization in the external service. - """ + """Return the URL for log visualization in the external service.""" # TODO: Formally inherit from io.IOBase class StreamLogWriter: - """ - Allows to redirect stdout and stderr to logger - """ + """Allows redirecting stdout and stderr to the logger""" encoding: None = None @@ -111,9 +101,7 @@ def closed(self): # noqa: D402 return False def _propagate_log(self, message): - """ - Propagate message removing escape codes. - """ + """Propagate message removing escape codes.""" self.logger.log(self.level, remove_escape_codes(message)) def write(self, message): @@ -130,9 +118,7 @@ def write(self, message): self._buffer = '' def flush(self): - """ - Ensure all logging output has been flushed - """ + """Ensure all logging output has been flushed""" if len(self._buffer) > 0: self._propagate_log(self._buffer) self._buffer = '' @@ -167,9 +153,7 @@ def __init__(self, stream): @property def stream(self): - """ - Returns current stream. - """ + """Returns current stream.""" if self._use_stderr: return sys.stderr diff --git a/airflow/utils/log/s3_task_handler.py b/airflow/utils/log/s3_task_handler.py index b0d7a8d9d3fda..6bccff79f4e4d 100644 --- a/airflow/utils/log/s3_task_handler.py +++ b/airflow/utils/log/s3_task_handler.py @@ -15,9 +15,7 @@ # KIND, either express or implied.
See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.amazon.aws.log.s3_task_handler`. -""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.log.s3_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/log/stackdriver_task_handler.py b/airflow/utils/log/stackdriver_task_handler.py index 903a64f509880..0b96380ad5cbc 100644 --- a/airflow/utils/log/stackdriver_task_handler.py +++ b/airflow/utils/log/stackdriver_task_handler.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.google.cloud.log.stackdriver_task_handler`. -""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.log.stackdriver_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/log/task_handler_with_custom_formatter.py b/airflow/utils/log/task_handler_with_custom_formatter.py index d80ad00a714d4..84c3b654ae621 100644 --- a/airflow/utils/log/task_handler_with_custom_formatter.py +++ b/airflow/utils/log/task_handler_with_custom_formatter.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Custom logging formatter for Airflow -""" +"""Custom logging formatter for Airflow""" import logging from logging import StreamHandler @@ -26,9 +24,7 @@ class TaskHandlerWithCustomFormatter(StreamHandler): - """ - Custom implementation of StreamHandler, a class which writes logging records for Airflow - """ + """Custom implementation of StreamHandler, a class which writes logging records for Airflow""" def __init__(self, stream): super().__init__() diff --git a/airflow/utils/log/wasb_task_handler.py b/airflow/utils/log/wasb_task_handler.py index 2580bd7c78c20..7b41933a1ecd2 100644 --- a/airflow/utils/log/wasb_task_handler.py +++ b/airflow/utils/log/wasb_task_handler.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This module is deprecated. Please use `airflow.providers.microsoft.azure.log.wasb_task_handler`. -""" +"""This module is deprecated. Please use `airflow.providers.microsoft.azure.log.wasb_task_handler`.""" import warnings # pylint: disable=unused-import diff --git a/airflow/utils/mixins.py b/airflow/utils/mixins.py index 7ab28745745bb..fbe0b24d561bc 100644 --- a/airflow/utils/mixins.py +++ b/airflow/utils/mixins.py @@ -22,9 +22,7 @@ class MultiprocessingStartMethodMixin: - """ - Convenience class to add support for different types of multiprocessing. - """ + """Convenience class to add support for different types of multiprocessing.""" def _get_multiprocessing_start_method(self) -> str: """ diff --git a/airflow/utils/net.py b/airflow/utils/net.py index 3659819b3c4b8..f697bfc4ad2f2 100644 --- a/airflow/utils/net.py +++ b/airflow/utils/net.py @@ -22,9 +22,7 @@ def get_host_ip_address(): - """ - Fetch host ip address. 
- """ + """Fetch host ip address.""" return socket.gethostbyname(socket.getfqdn()) diff --git a/airflow/utils/operator_resources.py b/airflow/utils/operator_resources.py index 532aeb0eea468..b7f42ba2043ef 100644 --- a/airflow/utils/operator_resources.py +++ b/airflow/utils/operator_resources.py @@ -59,16 +59,12 @@ def __repr__(self): @property def name(self): - """ - Name of the resource. - """ + """Name of the resource.""" return self._name @property def units_str(self): - """ - The string representing the units of a resource. - """ + """The string representing the units of a resource.""" return self._units_str @property @@ -81,36 +77,28 @@ def qty(self): class CpuResource(Resource): - """ - Represents a CPU requirement in an execution environment for an operator. - """ + """Represents a CPU requirement in an execution environment for an operator.""" def __init__(self, qty): super().__init__('CPU', 'core(s)', qty) class RamResource(Resource): - """ - Represents a RAM requirement in an execution environment for an operator. - """ + """Represents a RAM requirement in an execution environment for an operator.""" def __init__(self, qty): super().__init__('RAM', 'MB', qty) class DiskResource(Resource): - """ - Represents a disk requirement in an execution environment for an operator. - """ + """Represents a disk requirement in an execution environment for an operator.""" def __init__(self, qty): super().__init__('Disk', 'MB', qty) class GpuResource(Resource): - """ - Represents a GPU requirement in an execution environment for an operator. - """ + """Represents a GPU requirement in an execution environment for an operator.""" def __init__(self, qty): super().__init__('GPU', 'gpu(s)', qty) diff --git a/airflow/utils/orm_event_handlers.py b/airflow/utils/orm_event_handlers.py index 2d221cdd3cb0a..ab4954739a715 100644 --- a/airflow/utils/orm_event_handlers.py +++ b/airflow/utils/orm_event_handlers.py @@ -29,9 +29,7 @@ def setup_event_handlers(engine): - """ - Setups event handlers. - """ + """Setups event handlers.""" # pylint: disable=unused-argument, unused-variable @event.listens_for(engine, "connect") def connect(dbapi_connection, connection_record): diff --git a/airflow/utils/platform.py b/airflow/utils/platform.py index 45fd71ba19e5e..23c8b89cb53dd 100644 --- a/airflow/utils/platform.py +++ b/airflow/utils/platform.py @@ -15,9 +15,7 @@ # specific language governing permissions and limitations # under the License. -""" -Platform and system specific function. -""" +"""Platform and system specific function.""" import logging import os import pkgutil @@ -28,7 +26,8 @@ def is_tty(): """ - Checks if the standard output is s connected (is associated with a terminal device) to a tty(-like) device + Checks if the standard output is s connected (is associated with a terminal device) to a tty(-like) + device. """ if not hasattr(sys.stdout, "isatty"): return False @@ -36,9 +35,7 @@ def is_tty(): def is_terminal_support_colors() -> bool: - """ - Try to determine if the current terminal supports colors. - """ + """Try to determine if the current terminal supports colors.""" if sys.platform == "win32": return False if not is_tty(): diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py index 0e639c42482fa..3ee402efd2b72 100644 --- a/airflow/utils/process_utils.py +++ b/airflow/utils/process_utils.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. 
# -""" -Utilities for running or stopping processes -""" +"""Utilities for running or stopping processes""" import errno import logging import os diff --git a/airflow/utils/python_virtualenv.py b/airflow/utils/python_virtualenv.py index ff946442b1865..3bce32452a44e 100644 --- a/airflow/utils/python_virtualenv.py +++ b/airflow/utils/python_virtualenv.py @@ -16,9 +16,7 @@ # specific language governing permissions and limitations # under the License. # -""" -Utilities for creating a virtual environment -""" +"""Utilities for creating a virtual environment""" import os from typing import List, Optional diff --git a/airflow/utils/session.py b/airflow/utils/session.py index 8dbe2d09087e6..979c23a2fad8c 100644 --- a/airflow/utils/session.py +++ b/airflow/utils/session.py @@ -24,9 +24,7 @@ @contextlib.contextmanager def create_session(): - """ - Contextmanager that will create and teardown a session. - """ + """Contextmanager that will create and teardown a session.""" session = settings.Session() try: yield session diff --git a/airflow/utils/sqlalchemy.py b/airflow/utils/sqlalchemy.py index db01872704665..8025b4304c3c8 100644 --- a/airflow/utils/sqlalchemy.py +++ b/airflow/utils/sqlalchemy.py @@ -92,9 +92,7 @@ def process_result_value(self, value, dialect): class Interval(TypeDecorator): - """ - Base class representing a time interval. - """ + """Base class representing a time interval.""" impl = Text @@ -195,9 +193,7 @@ def with_row_locks(query, **kwargs): class CommitProhibitorGuard: - """ - Context manager class that powers prohibit_commit - """ + """Context manager class that powers prohibit_commit""" expected_commit = False diff --git a/airflow/utils/state.py b/airflow/utils/state.py index 59804a9600cd1..c857c0606867d 100644 --- a/airflow/utils/state.py +++ b/airflow/utils/state.py @@ -84,16 +84,12 @@ class State: @classmethod def color(cls, state): - """ - Returns color for a state. - """ + """Returns color for a state.""" return cls.state_color.get(state, 'white') @classmethod def color_fg(cls, state): - """ - Black&white colors for a state. - """ + """Black&white colors for a state.""" color = cls.color(state) if color in ['green', 'red']: return 'white' @@ -135,9 +131,7 @@ def color_fg(cls, state): class PokeState: - """ - Static class with poke states constants used in smart operator. - """ + """Static class with poke states constants used in smart operator.""" LANDED = 'landed' NOT_LANDED = 'not_landed' diff --git a/airflow/utils/strings.py b/airflow/utils/strings.py index 4bf9a7e0f120b..8ae735c75a7ed 100644 --- a/airflow/utils/strings.py +++ b/airflow/utils/strings.py @@ -1,6 +1,3 @@ -""" -Common utility functions with strings -""" # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -17,20 +14,17 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+"""Common utility functions with strings""" import string from random import choice def get_random_string(length=8, choices=string.ascii_letters + string.digits): - """ - Generate random string - """ + """Generate random string""" return ''.join([choice(choices) for _ in range(length)]) def to_boolean(astring): - """ - Convert a string to a boolean - """ + """Convert a string to a boolean""" return astring.lower() in ['true', 't', 'y', 'yes', '1'] diff --git a/airflow/utils/task_group.py b/airflow/utils/task_group.py index 84cc540147232..8fe278d810a26 100644 --- a/airflow/utils/task_group.py +++ b/airflow/utils/task_group.py @@ -117,16 +117,12 @@ def __init__( @classmethod def create_root(cls, dag: "DAG") -> "TaskGroup": - """ - Create a root TaskGroup with no group_id or parent. - """ + """Create a root TaskGroup with no group_id or parent.""" return cls(group_id=None, dag=dag) @property def is_root(self) -> bool: - """ - Returns True if this TaskGroup is the root TaskGroup. Otherwise False - """ + """Returns True if this TaskGroup is the root TaskGroup. Otherwise False""" return not self.group_id def __iter__(self): @@ -138,9 +134,7 @@ def __iter__(self): yield child def add(self, task: Union["BaseOperator", "TaskGroup"]) -> None: - """ - Add a task to this TaskGroup. - """ + """Add a task to this TaskGroup.""" key = task.group_id if isinstance(task, TaskGroup) else task.task_id if key in self.children: @@ -154,9 +148,7 @@ def add(self, task: Union["BaseOperator", "TaskGroup"]) -> None: @property def group_id(self) -> Optional[str]: - """ - group_id of this TaskGroup. - """ + """group_id of this TaskGroup.""" if self._parent_group and self._parent_group.prefix_group_id and self._parent_group.group_id: return self._parent_group.child_id(self._group_id) @@ -164,9 +156,7 @@ def group_id(self) -> Optional[str]: @property def label(self) -> Optional[str]: - """ - group_id excluding parent's group_id used as the node label in UI. - """ + """group_id excluding parent's group_id used as the node label in UI.""" return self._group_id def update_relative(self, other: "TaskMixin", upstream=True) -> None: @@ -224,17 +214,13 @@ def _set_relative( def set_downstream( self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]] ) -> None: - """ - Set a TaskGroup/task/list of task downstream of this TaskGroup. - """ + """Set a TaskGroup/task/list of task downstream of this TaskGroup.""" self._set_relative(task_or_task_list, upstream=False) def set_upstream( self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]] ) -> None: - """ - Set a TaskGroup/task/list of task upstream of this TaskGroup. - """ + """Set a TaskGroup/task/list of task upstream of this TaskGroup.""" self._set_relative(task_or_task_list, upstream=True) def __enter__(self): @@ -245,9 +231,7 @@ def __exit__(self, _type, _value, _tb): TaskGroupContext.pop_context_managed_task_group() def has_task(self, task: "BaseOperator") -> bool: - """ - Returns True if this TaskGroup or its children TaskGroups contains the given task. 
-        """
+        """Returns True if this TaskGroup or its children TaskGroups contains the given task."""
         if task.task_id in self.children:
             return True
 
@@ -310,9 +294,7 @@ def downstream_join_id(self) -> str:
         return f"{self.group_id}.downstream_join_id"
 
     def get_task_group_dict(self) -> Dict[str, "TaskGroup"]:
-        """
-        Returns a flat dictionary of group_id: TaskGroup
-        """
+        """Returns a flat dictionary of group_id: TaskGroup"""
         task_group_map = {}
 
         def build_map(task_group):
@@ -328,34 +310,26 @@ def build_map(task_group):
         return task_group_map
 
     def get_child_by_label(self, label: str) -> Union["BaseOperator", "TaskGroup"]:
-        """
-        Get a child task/TaskGroup by its label (i.e. task_id/group_id without the group_id prefix)
-        """
+        """Get a child task/TaskGroup by its label (i.e. task_id/group_id without the group_id prefix)"""
         return self.children[self.child_id(label)]
 
 
 class TaskGroupContext:
-    """
-    TaskGroup context is used to keep the current TaskGroup when TaskGroup is used as ContextManager.
-    """
+    """TaskGroup context is used to keep the current TaskGroup when TaskGroup is used as ContextManager."""
 
     _context_managed_task_group: Optional[TaskGroup] = None
     _previous_context_managed_task_groups: List[TaskGroup] = []
 
     @classmethod
    def push_context_managed_task_group(cls, task_group: TaskGroup):
-        """
-        Push a TaskGroup into the list of managed TaskGroups.
-        """
+        """Push a TaskGroup into the list of managed TaskGroups."""
         if cls._context_managed_task_group:
             cls._previous_context_managed_task_groups.append(cls._context_managed_task_group)
         cls._context_managed_task_group = task_group
 
     @classmethod
     def pop_context_managed_task_group(cls) -> Optional[TaskGroup]:
-        """
-        Pops the last TaskGroup from the list of manged TaskGroups and update the current TaskGroup.
-        """
+        """Pops the last TaskGroup from the list of managed TaskGroups and updates the current TaskGroup."""
         old_task_group = cls._context_managed_task_group
         if cls._previous_context_managed_task_groups:
             cls._context_managed_task_group = cls._previous_context_managed_task_groups.pop()
@@ -365,9 +339,7 @@ def pop_context_managed_task_group(cls) -> Optional[TaskGroup]:
 
     @classmethod
     def get_current_task_group(cls, dag: Optional["DAG"]) -> Optional[TaskGroup]:
-        """
-        Get the current TaskGroup.
-        """
+        """Get the current TaskGroup."""
         from airflow.models.dag import DagContext
 
         if not cls._context_managed_task_group:
diff --git a/airflow/utils/timeout.py b/airflow/utils/timeout.py
index d63d82a284bc8..bd88fde6b130f 100644
--- a/airflow/utils/timeout.py
+++ b/airflow/utils/timeout.py
@@ -24,9 +24,7 @@
 
 
 class timeout(LoggingMixin):  # pylint: disable=invalid-name
-    """
-    To be used in a ``with`` block and timeout its content.
-    """
+    """To be used in a ``with`` block and time out its content."""
 
     def __init__(self, seconds=1, error_message='Timeout'):
         super().__init__()
@@ -34,9 +32,7 @@ def __init__(self, seconds=1, error_message='Timeout'):
         self.error_message = error_message + ', PID: ' + str(os.getpid())
 
     def handle_timeout(self, signum, frame):  # pylint: disable=unused-argument
-        """
-        Logs information and raises AirflowTaskTimeout.
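
# Usage sketch for the timeout context manager above; do_work() is a
# hypothetical stand-in for the guarded call.
from airflow.exceptions import AirflowTaskTimeout
from airflow.utils.timeout import timeout

try:
    with timeout(seconds=5, error_message='do_work took too long'):
        do_work()
except AirflowTaskTimeout:
    pass  # handle_timeout() logged the PID and raised, as shown below
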
-        """
+        """Logs information and raises AirflowTaskTimeout."""
         self.log.error("Process timed out, PID: %s", str(os.getpid()))
         raise AirflowTaskTimeout(self.error_message)
diff --git a/airflow/utils/trigger_rule.py b/airflow/utils/trigger_rule.py
index 831f0eee2aed8..0ce820bf26448 100644
--- a/airflow/utils/trigger_rule.py
+++ b/airflow/utils/trigger_rule.py
@@ -20,9 +20,7 @@
 
 
 class TriggerRule:
-    """
-    Class with task's trigger rules.
-    """
+    """Class with task's trigger rules."""
 
     ALL_SUCCESS = 'all_success'
     ALL_FAILED = 'all_failed'
@@ -38,16 +36,12 @@ class TriggerRule:
 
     @classmethod
     def is_valid(cls, trigger_rule):
-        """
-        Validates a trigger rule.
-        """
+        """Validates a trigger rule."""
         return trigger_rule in cls.all_triggers()
 
     @classmethod
     def all_triggers(cls):
-        """
-        Returns all trigger rules.
-        """
+        """Returns all trigger rules."""
         if not cls._ALL_TRIGGER_RULES:
             cls._ALL_TRIGGER_RULES = {
                 getattr(cls, attr)
diff --git a/airflow/utils/types.py b/airflow/utils/types.py
index 9dfee0009f4d7..19787e9d75201 100644
--- a/airflow/utils/types.py
+++ b/airflow/utils/types.py
@@ -26,9 +26,7 @@ class DagRunType(str, enum.Enum):
 
     @staticmethod
     def from_run_id(run_id: str) -> "DagRunType":
-        """
-        Resolved DagRun type from run_id.
-        """
+        """Resolves DagRun type from run_id."""
         for run_type in DagRunType:
             if run_id and run_id.startswith(f"{run_type.value}__"):
                 return run_type
diff --git a/airflow/utils/weekday.py b/airflow/utils/weekday.py
index 8bd82a6291e9a..a57281267b2c5 100644
--- a/airflow/utils/weekday.py
+++ b/airflow/utils/weekday.py
@@ -14,17 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-Get the ISO standard day number of the week from a given day string
-"""
+"""Get the ISO standard day number of the week from a given day string"""
 import enum
 
 
 @enum.unique
 class WeekDay(enum.IntEnum):
-    """
-    Python Enum containing Days of the Week
-    """
+    """Python Enum containing Days of the Week"""
 
     MONDAY = 1
     TUESDAY = 2
diff --git a/airflow/utils/weight_rule.py b/airflow/utils/weight_rule.py
index 1f688f15990ff..e23f7075c37dd 100644
--- a/airflow/utils/weight_rule.py
+++ b/airflow/utils/weight_rule.py
@@ -20,9 +20,7 @@
 
 
 class WeightRule:
-    """
-    Weight rules.
-    """
+    """Weight rules."""
 
     DOWNSTREAM = 'downstream'
     UPSTREAM = 'upstream'
@@ -32,16 +30,12 @@ class WeightRule:
 
     @classmethod
     def is_valid(cls, weight_rule):
-        """
-        Check if weight rule is valid.
-        """
+        """Check if weight rule is valid."""
         return weight_rule in cls.all_weight_rules()
 
     @classmethod
     def all_weight_rules(cls):
-        """
-        Returns all weight rules
-        """
+        """Returns all weight rules"""
         if not cls._ALL_WEIGHT_RULES:
             cls._ALL_WEIGHT_RULES = {
                 getattr(cls, attr)
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index a4105898b7f12..025bfdef5caf7 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -132,9 +132,7 @@ def trigger_dag(dag_id):
 
 
 @api_experimental.route('/dags/<string:dag_id>', methods=['DELETE'])
 @requires_authentication
 def delete_dag(dag_id):
-    """
-    Delete all DB records related to the specified Dag.
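
# Quick illustration of the validators and enums above, using only constants
# visible in this diff:
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weekday import WeekDay

assert TriggerRule.is_valid('all_success')      # membership in all_triggers()
assert not TriggerRule.is_valid('on_tuesdays')  # unknown rules are rejected
assert WeekDay.MONDAY == 1                      # ISO day numbering
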
- """ + """Delete all DB records related to the specified Dag.""" try: count = delete.delete_dag(dag_id) except AirflowException as err: diff --git a/airflow/www/decorators.py b/airflow/www/decorators.py index 20a0ee7b3b3cc..17250d860edf0 100644 --- a/airflow/www/decorators.py +++ b/airflow/www/decorators.py @@ -32,9 +32,7 @@ def action_logging(f: T) -> T: - """ - Decorator to log user actions - """ + """Decorator to log user actions""" @functools.wraps(f) def wrapper(*args, **kwargs): @@ -66,9 +64,7 @@ def wrapper(*args, **kwargs): def gzipped(f: T) -> T: - """ - Decorator to make a view compressed - """ + """Decorator to make a view compressed""" @functools.wraps(f) def view_func(*args, **kwargs): @@ -105,9 +101,7 @@ def zipper(response): # pylint: disable=unused-variable def has_dag_access(**dag_kwargs) -> Callable[[T], T]: - """ - Decorator to check whether the user has read / write permission on the dag. - """ + """Decorator to check whether the user has read / write permission on the dag.""" def decorator(f: T): @functools.wraps(f) diff --git a/airflow/www/extensions/init_appbuilder.py b/airflow/www/extensions/init_appbuilder.py index 32c73b03eb61c..96d12d52702b2 100644 --- a/airflow/www/extensions/init_appbuilder.py +++ b/airflow/www/extensions/init_appbuilder.py @@ -34,9 +34,7 @@ def init_appbuilder(app): ) class AirflowAppBuilder(AppBuilder): - """ - Custom class to prevent side effects of the session. - """ + """Custom class to prevent side effects of the session.""" def _check_and_init(self, baseview): if hasattr(baseview, 'datamodel'): diff --git a/airflow/www/forms.py b/airflow/www/forms.py index 20b35a32fdb98..eb4c8a22dc056 100644 --- a/airflow/www/forms.py +++ b/airflow/www/forms.py @@ -41,9 +41,7 @@ class DateTimeWithTimezoneField(Field): - """ - A text field which stores a `datetime.datetime` matching a format. 
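
# Hypothetical plugin view combining the decorators above; the view name and
# return value are invented for illustration.
from airflow.www.decorators import action_logging, gzipped

@gzipped          # compress the response when the client accepts gzip
@action_logging   # record the user action (see docstring above)
def my_plugin_view():
    return "ok"
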
- """ + """A text field which stores a `datetime.datetime` matching a format.""" widget = widgets.TextInput() @@ -86,9 +84,7 @@ def _get_default_timezone(self): class DateTimeForm(FlaskForm): - """ - Date filter form needed for task views - """ + """Date filter form needed for task views""" execution_date = DateTimeWithTimezoneField( "Execution date", widget=AirflowDateTimePickerWidget()) @@ -112,9 +108,7 @@ class DateTimeWithNumRunsForm(FlaskForm): class DateTimeWithNumRunsWithDagRunsForm(DateTimeWithNumRunsForm): - """ - Date time and number of runs and dag runs form for graph and gantt view - """ + """Date time and number of runs and dag runs form for graph and gantt view""" execution_date = SelectField("DAG run") diff --git a/airflow/www/security.py b/airflow/www/security.py index 816468d3615be..7d47af07e9365 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -253,9 +253,7 @@ def get_user_roles(user=None): return user.roles def get_all_permissions_views(self): - """ - Returns a set of tuples with the perm name and view menu name - """ + """Returns a set of tuples with the perm name and view menu name""" perms_views = set() for role in self.get_user_roles(): perms_views.update( @@ -386,23 +384,17 @@ def has_access(self, permission, resource, user=None) -> bool: return has_access def _get_and_cache_perms(self): - """ - Cache permissions-views - """ + """Cache permissions-views""" self.perms = self.get_all_permissions_views() def _has_role(self, role_name_or_list): - """ - Whether the user has this role name - """ + """Whether the user has this role name""" if not isinstance(role_name_or_list, list): role_name_or_list = [role_name_or_list] return any(r.name in role_name_or_list for r in self.get_user_roles()) def _has_perm(self, permission_name, view_menu_name): - """ - Whether the user has this perm - """ + """Whether the user has this perm""" if hasattr(self, 'perms'): if (permission_name, view_menu_name) in self.perms: return True @@ -424,9 +416,7 @@ def has_all_dags_access(self): ) def clean_perms(self): - """ - FAB leaves faulty permissions that need to be cleaned up - """ + """FAB leaves faulty permissions that need to be cleaned up""" self.log.debug('Cleaning faulty perms') sesh = self.get_session pvms = sesh.query(sqla_models.PermissionView).filter( @@ -611,9 +601,7 @@ def sync_roles(self): self.clean_perms() def sync_resource_permissions(self, perms=None): - """ - Populates resource-based permissions. - """ + """Populates resource-based permissions.""" if not perms: return @@ -704,9 +692,7 @@ def _revoke_stale_permissions(dag_view): self.add_permission_role(role, dag_perm) def create_perm_vm_for_all_dag(self): - """ - Create perm-vm if not exist and insert into FAB security model for all-dags. 
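
# Sketch of the role and permission checks above; `security_manager` is a
# hypothetical reference to the active Airflow security manager, and the
# role/permission names are illustrative.
security_manager._has_role('Admin')                    # single role name
security_manager._has_role(['Admin', 'Op'])            # any role from a list
security_manager._has_perm('can_read', 'Connections')  # perm + view-menu pair
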
- """ + """Create perm-vm if not exist and insert into FAB security model for all-dags.""" # create perm for global logical dag for dag_vm in self.DAG_VMS: for perm in self.DAG_PERMS: diff --git a/airflow/www/utils.py b/airflow/www/utils.py index 2d087de44be0d..74ffbea83003f 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -184,9 +184,7 @@ def epoch(dttm): def json_response(obj): - """ - Returns a json response from a json serializable python object - """ + """Returns a json response from a json serializable python object""" return Response( response=json.dumps( obj, indent=4, cls=AirflowJsonEncoder), @@ -195,9 +193,7 @@ def json_response(obj): def make_cache_key(*args, **kwargs): - """ - Used by cache to get a unique key per URL - """ + """Used by cache to get a unique key per URL""" path = request.path args = str(hash(frozenset(request.args.items()))) return (path + args).encode('ascii', 'ignore') @@ -372,9 +368,7 @@ def get_chart_height(dag): class UtcAwareFilterMixin: # noqa: D101 - """ - Mixin for filter for UTC time. - """ + """Mixin for filter for UTC time.""" def apply(self, query, value): """Apply the filter.""" @@ -384,33 +378,23 @@ def apply(self, query, value): class UtcAwareFilterEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterEqual): # noqa: D101 - """ - Equality filter for UTC time. - """ + """Equality filter for UTC time.""" class UtcAwareFilterGreater(UtcAwareFilterMixin, fab_sqlafilters.FilterGreater): # noqa: D101 - """ - Greater Than filter for UTC time. - """ + """Greater Than filter for UTC time.""" class UtcAwareFilterSmaller(UtcAwareFilterMixin, fab_sqlafilters.FilterSmaller): # noqa: D101 - """ - Smaller Than filter for UTC time. - """ + """Smaller Than filter for UTC time.""" class UtcAwareFilterNotEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterNotEqual): # noqa: D101 - """ - Not Equal To filter for UTC time. - """ + """Not Equal To filter for UTC time.""" class UtcAwareFilterConverter(fab_sqlafilters.SQLAFilterConverter): # noqa: D101 - """ - Retrieve conversion tables for UTC-Aware filters. - """ + """Retrieve conversion tables for UTC-Aware filters.""" conversion_table = ( (('is_utcdatetime', [UtcAwareFilterEqual, diff --git a/airflow/www/views.py b/airflow/www/views.py index ce7ca7e5fad8c..7664f707b5581 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -242,9 +242,7 @@ def dag_edges(dag): task_group_map = dag.task_group.get_task_group_dict() def collect_edges(task_group): - """ - Update edges_to_add and edges_to_skip according to TaskGroups. - """ + """Update edges_to_add and edges_to_skip according to TaskGroups.""" if isinstance(task_group, BaseOperator): return @@ -361,9 +359,7 @@ def render_template(self, *args, **kwargs): class Airflow(AirflowBaseView): # noqa: D101 pylint: disable=too-many-public-methods - """ - Main Airflow application. 
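
# Minimal sketch of json_response() above in a hypothetical Flask view; the
# payload is serialized with json.dumps(..., cls=AirflowJsonEncoder) as shown.
from airflow.www.utils import json_response

def health_view():
    return json_response({'status': 'healthy'})
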
- """ + """Main Airflow application.""" @expose('/health') def health(self): @@ -3161,9 +3157,7 @@ class DagModelView(AirflowModelView): base_filters = [['dag_id', DagFilter, lambda: []]] def get_query(self): - """ - Default filters for model - """ + """Default filters for model""" return ( super().get_query() # noqa pylint: disable=no-member .filter(or_(models.DagModel.is_active, @@ -3172,9 +3166,7 @@ def get_query(self): ) def get_count_query(self): - """ - Default filters for model - """ + """Default filters for model""" return ( super().get_count_query() # noqa pylint: disable=no-member .filter(models.DagModel.is_active) diff --git a/dags/test_dag.py b/dags/test_dag.py index a0c30afcac5e8..8a1695f310e3f 100644 --- a/dags/test_dag.py +++ b/dags/test_dag.py @@ -15,9 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -This dag only runs some simple tasks to test Airflow's task execution. -""" +"""This dag only runs some simple tasks to test Airflow's task execution.""" from datetime import datetime, timedelta from airflow.models.dag import DAG diff --git a/docs/build_docs.py b/docs/build_docs.py index e6eea86ee98b9..92238125030f5 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -39,9 +39,7 @@ @total_ordering class DocBuildError(NamedTuple): - """ - Errors found in docs build. - """ + """Errors found in docs build.""" file_path: Optional[str] line_no: Optional[int] @@ -65,9 +63,7 @@ def __lt__(self, other): @total_ordering class SpellingError(NamedTuple): - """ - Spelling errors found when building docs. - """ + """Spelling errors found when building docs.""" file_path: Optional[str] line_no: Optional[int] @@ -107,9 +103,7 @@ def __lt__(self, other): def clean_files() -> None: - """ - Cleanup all artifacts generated by previous builds. - """ + """Cleanup all artifacts generated by previous builds.""" shutil.rmtree(_API_DIR, ignore_errors=True) shutil.rmtree(_BUILD_DIR, ignore_errors=True) os.makedirs(_API_DIR, exist_ok=True) @@ -118,9 +112,7 @@ def clean_files() -> None: def display_errors_summary() -> None: - """ - Displays summary of errors - """ + """Displays summary of errors""" for warning_no, error in enumerate(sorted(build_errors), 1): print("=" * 20, f"Error {warning_no:3}", "=" * 20) print(error.message) @@ -136,9 +128,7 @@ def display_errors_summary() -> None: def display_spelling_error_summary() -> None: - """ - Displays summary of Spelling errors - """ + """Displays summary of Spelling errors""" for warning_no, error in enumerate(sorted(spelling_errors), 1): print("=" * 20, f"Error {warning_no:3}", "=" * 20) print(error.message) @@ -191,9 +181,7 @@ def extract_ast_class_def_by_name(ast_tree, class_name): """ class ClassVisitor(ast.NodeVisitor): - """ - Visitor. - """ + """Visitor.""" def __init__(self): self.found_class_node = None @@ -214,10 +202,7 @@ def visit_ClassDef(self, node): # pylint: disable=invalid-name def check_guide_links_in_operator_descriptions() -> None: - """ - Check if there are links to guides in operator's descriptions. - - """ + """Check if there are links to guides in operator's descriptions.""" def generate_build_error(path, line_no, operator_name): return DocBuildError( @@ -327,9 +312,7 @@ def find_modules(deprecated_only: bool = False) -> Set[str]: def check_class_links_in_operators_and_hooks_ref() -> None: - """ - Checks classes and links in the operators and hooks ref. 
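
# Illustration of the DocBuildError record above: a NamedTuple with
# @total_ordering, which is what lets display_errors_summary() call sorted().
# Assumes build_docs is importable from the docs directory.
from build_docs import DocBuildError

err = DocBuildError(file_path='docs/howto/foo.rst', line_no=12, message='broken reference')
print(sorted([err])[0].message)  # 'broken reference'
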
- """ + """Checks classes and links in the operators and hooks ref.""" with open(os.path.join(DOCS_DIR, "operators-and-hooks-ref.rst")) as ref_file: content = ref_file.read() current_modules_in_file = set(re.findall(r":mod:`(.+?)`", content)) @@ -361,9 +344,7 @@ def check_class_links_in_operators_and_hooks_ref() -> None: def check_guide_links_in_operators_and_hooks_ref() -> None: - """ - Checks all guide links in operators and hooks references. - """ + """Checks all guide links in operators and hooks references.""" all_guides = glob(f"{DOCS_DIR}/howto/operator/**/*.rst", recursive=True) # Remove extension all_guides = [ @@ -406,9 +387,7 @@ def check_guide_links_in_operators_and_hooks_ref() -> None: def check_exampleinclude_for_example_dags(): - """ - Checks all exampleincludes for example dags. - """ + """Checks all exampleincludes for example dags.""" all_docs_files = glob(f"${DOCS_DIR}/**/*rst", recursive=True) for doc_file in all_docs_files: @@ -423,9 +402,7 @@ def check_exampleinclude_for_example_dags(): def check_enforce_code_block(): - """ - Checks all code:: blocks. - """ + """Checks all code:: blocks.""" all_docs_files = glob(f"${DOCS_DIR}/**/*rst", recursive=True) for doc_file in all_docs_files: @@ -457,10 +434,7 @@ def check_enforce_code_block(): def check_google_guides(): - """ - Checks Google guides. - - """ + """Checks Google guides.""" doc_files = glob(f"{DOCS_DIR}/howto/operator/google/**/*.rst", recursive=True) doc_names = {f.split("/")[-1].rsplit(".")[0] for f in doc_files} @@ -677,9 +651,7 @@ def check_spelling() -> None: def build_sphinx_docs() -> None: - """ - Build documentation for sphinx. - """ + """Build documentation for sphinx.""" with NamedTemporaryFile() as tmp_file: build_cmd = [ "sphinx-build", diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py index 778ef6edc0629..774098b6cb32a 100644 --- a/docs/exts/exampleinclude.py +++ b/docs/exts/exampleinclude.py @@ -42,9 +42,7 @@ class ExampleHeader(nodes.reference, nodes.FixedTextElement): # pylint: disable=too-many-ancestors - """ - Header for examples. - """ + """Header for examples.""" class ExampleInclude(SphinxDirective): diff --git a/docs/exts/redirects.py b/docs/exts/redirects.py index 5ea4885f7fedd..8a06c42aa18b2 100644 --- a/docs/exts/redirects.py +++ b/docs/exts/redirects.py @@ -14,9 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-""" -Based on: https://github.com/sphinx-contrib/redirects -""" +"""Based on: https://github.com/sphinx-contrib/redirects""" import os from sphinx.builders import html as builders diff --git a/docs/exts/sphinx_script_update.py b/docs/exts/sphinx_script_update.py index 0210aa7617c92..4b3930a5b3eac 100644 --- a/docs/exts/sphinx_script_update.py +++ b/docs/exts/sphinx_script_update.py @@ -35,9 +35,7 @@ def _gethash(string: str): def _user_cache_dir(appname=None): - """ - Return full path to the user-specific cache dir for this application - """ + """Return full path to the user-specific cache dir for this application""" if sys.platform == "win32": # Windows has a complex procedure to download the App Dir directory because this directory can be # changed in window registry, so i use temporary directory for cache diff --git a/metastore_browser/hive_metastore.py b/metastore_browser/hive_metastore.py index f3890ead23b1f..98413695155c5 100644 --- a/metastore_browser/hive_metastore.py +++ b/metastore_browser/hive_metastore.py @@ -46,17 +46,13 @@ class MetastoreBrowserView(BaseView): - """ - Creating a Flask-AppBuilder BaseView - """ + """Creating a Flask-AppBuilder BaseView""" default_view = 'index' @expose('/') def index(self): - """ - Create default view - """ + """Create default view""" sql = """ SELECT a.name as db, db_location_uri as location, @@ -80,9 +76,7 @@ def index(self): @expose('/table/') def table(self): - """ - Create table view - """ + """Create table view""" table_name = request.args.get("table") metastore = HiveMetastoreHook(METASTORE_CONN_ID) table = metastore.get_table(table_name) @@ -92,9 +86,7 @@ def table(self): @expose('/db/') def db(self): - """ - Show tables in database - """ + """Show tables in database""" db = request.args.get("db") metastore = HiveMetastoreHook(METASTORE_CONN_ID) tables = sorted(metastore.get_tables(db=db), key=lambda x: x.tableName) @@ -104,9 +96,7 @@ def db(self): @gzipped @expose('/partitions/') def partitions(self): - """ - Retrieve table partitions - """ + """Retrieve table partitions""" schema, table = request.args.get("table").split('.') sql = """ SELECT @@ -135,9 +125,7 @@ def partitions(self): @gzipped @expose('/objects/') def objects(self): - """ - Retrieve objects from TBLS and DBS - """ + """Retrieve objects from TBLS and DBS""" where_clause = '' if DB_ALLOW_LIST: dbs = ",".join(["'" + db + "'" for db in DB_ALLOW_LIST]) @@ -166,9 +154,7 @@ def objects(self): @gzipped @expose('/data/') def data(self): - """ - Retrieve data from table - """ + """Retrieve data from table""" table = request.args.get("table") sql = "SELECT * FROM {table} LIMIT 1000;".format(table=table) hook = PrestoHook(PRESTO_CONN_ID) @@ -180,9 +166,7 @@ def data(self): @expose('/ddl/') def ddl(self): - """ - Retrieve table ddl - """ + """Retrieve table ddl""" table = request.args.get("table") sql = "SHOW CREATE TABLE {table};".format(table=table) hook = HiveCliHook(HIVE_CLI_CONN_ID) @@ -198,9 +182,7 @@ def ddl(self): class MetastoreBrowserPlugin(AirflowPlugin): - """ - Defining the plugin class - """ + """Defining the plugin class""" name = "metastore_browser" flask_blueprints = [bp]