Skip to content

Commit

Permalink
Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)
Browse files Browse the repository at this point in the history

(cherry picked from commit 9bcdada)
  • Loading branch information
kaxil committed Jul 1, 2020
1 parent d1a9830 commit c349a22
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions airflow/contrib/operators/dataproc_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -1057,7 +1057,7 @@ class DataProcPySparkOperator(DataProcJobBaseOperator):
Start a PySpark Job on a Cloud DataProc cluster.
:param main: [Required] The Hadoop Compatible Filesystem (HCFS) URI of the main
Python file to use as the driver. Must be a .py file.
Python file to use as the driver. Must be a .py file. (templated)
:type main: str
:param arguments: Arguments for the job. (templated)
:type arguments: list
Expand All @@ -1077,7 +1077,7 @@ class DataProcPySparkOperator(DataProcJobBaseOperator):
:type dataproc_pyspark_jars: list
"""

template_fields = ['arguments', 'job_name', 'cluster_name',
template_fields = ['main', 'arguments', 'job_name', 'cluster_name',
'region', 'dataproc_jars', 'dataproc_properties']
ui_color = '#0273d4'
job_type = 'pysparkJob'
Expand Down
2 changes: 1 addition & 1 deletion tests/contrib/operators/test_dataproc_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -1112,7 +1112,7 @@ def test_render_template(self):
)

self.assertEqual(
task.template_fields, ['arguments', 'job_name', 'cluster_name',
task.template_fields, ['main', 'arguments', 'job_name', 'cluster_name',
'region', 'dataproc_jars', 'dataproc_properties'])

ti = TaskInstance(task, DEFAULT_DATE)
Expand Down

0 comments on commit c349a22

Please sign in to comment.