diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 540598d089ce0..d35c798c70849 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -340,7 +340,6 @@ repos:
           ^airflow\/providers\/apache\/spark\/operators\/spark_submit.py\.py$|
           ^airflow\/providers\/google\/cloud\/operators\/vertex_ai\/auto_ml\.py$|
           ^airflow\/providers\/apache\/spark\/operators\/spark_submit\.py$|
-          ^airflow\/providers\/apache\/spark\/operators\/spark_sql\.py$|
           ^airflow\/providers\/databricks\/operators\/databricks_sql\.py$|
           )$
   - id: ruff
diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py
index 47487fa384057..7e48b748c603f 100644
--- a/airflow/providers/apache/spark/operators/spark_sql.py
+++ b/airflow/providers/apache/spark/operators/spark_sql.py
@@ -19,6 +19,9 @@
 
 from typing import TYPE_CHECKING, Any, Sequence
 
+from deprecated import deprecated
+
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import BaseOperator
 from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook
 
@@ -52,9 +55,9 @@ class SparkSqlOperator(BaseOperator):
         (Default: The ``queue`` value set in the Connection, or ``"default"``)
     """
 
-    template_fields: Sequence[str] = ("_sql",)
+    template_fields: Sequence[str] = ("sql",)
     template_ext: Sequence[str] = (".sql", ".hql")
-    template_fields_renderers = {"_sql": "sql"}
+    template_fields_renderers = {"sql": "sql"}
 
     def __init__(
         self,
@@ -75,7 +78,7 @@ def __init__(
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
-        self._sql = sql
+        self.sql = sql
         self._conf = conf
         self._conn_id = conn_id
         self._total_executor_cores = total_executor_cores
@@ -90,6 +93,15 @@ def __init__(
         self._yarn_queue = yarn_queue
         self._hook: SparkSqlHook | None = None
 
+    @property
+    @deprecated(
+        reason="`_sql` is deprecated and will be removed in the future. Please use `sql` instead.",
+        category=AirflowProviderDeprecationWarning,
+    )
+    def _sql(self):
+        """Alias for ``sql``, used for compatibility (deprecated)."""
+        return self.sql
+
     def execute(self, context: Context) -> None:
         """Call the SparkSqlHook to run the provided sql query."""
         if self._hook is None:
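
Below is a minimal, self-contained sketch (not part of the patch) of the backward-compatibility pattern the change introduces: the templated attribute is renamed from the private `_sql` to the public `sql`, and a read-only `_sql` property is kept as a deprecated alias so existing code that reads `_sql` keeps working but emits a warning. The class name `LegacyAliasDemo` is hypothetical, and plain `DeprecationWarning` stands in for Airflow's `AirflowProviderDeprecationWarning`; only the stacked `@property` / `@deprecated` usage mirrors the actual diff.

# Minimal sketch (not the real operator): the deprecated-alias pattern used above,
# built on the third-party `deprecated` package that the patch imports.
import warnings

from deprecated import deprecated


class LegacyAliasDemo:
    """Hypothetical stand-in for SparkSqlOperator."""

    def __init__(self, sql: str) -> None:
        # New public attribute; templating and user code should use "sql".
        self.sql = sql

    @property
    @deprecated(
        reason="`_sql` is deprecated and will be removed in the future. Please use `sql` instead.",
        category=DeprecationWarning,  # the real code uses AirflowProviderDeprecationWarning
    )
    def _sql(self) -> str:
        """Alias for ``sql``, kept only for backward compatibility."""
        return self.sql


if __name__ == "__main__":
    demo = LegacyAliasDemo(sql="SELECT 1")
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        assert demo._sql == "SELECT 1"  # old name still resolves to the new attribute
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)  # ...but it warns

Renaming the attribute is also why `template_fields` and `template_fields_renderers` now reference `"sql"`: Airflow resolves templated fields by attribute name, so they must point at the new public attribute rather than the deprecated alias.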