[AIRFLOW-XXX] Don't spam test logs with "bad cron expression" messages (#3973)

We needed these test dags to check the behaviour of invalid cron
expressions, but by default we were loading them every time we created
a DagBag (which many, many tests do).

Instead, we ignore these known-bad dags by default. The test that covers
them (tests/models.py:DagBagTest.test_process_file_cron_validity_check)
already processes those DAGs explicitly, so that behaviour remains
tested.
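
For context, a minimal sketch (not the actual test in tests/models.py, and assuming the
DagBag API shape of this Airflow version) of how a known-bad DAG file can still be
exercised explicitly even though normal DagBag collection now skips it:

    import os
    import tempfile
    from airflow.models import DagBag

    # Assumed layout: the known-bad dag files live under tests/dags/.
    TEST_DAGS_FOLDER = os.path.join(os.path.dirname(__file__), 'dags')

    # Collect nothing up front (empty folder), then process the bad file explicitly.
    dagbag = DagBag(dag_folder=tempfile.mkdtemp(), include_examples=False)
    dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, 'test_invalid_cron.py'))

    # Per the commit message, the invalid cron expression should be rejected
    # and recorded as an import error rather than producing a usable DAG.
    assert len(dagbag.import_errors) >= 1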
ashb authored and kaxil committed Nov 29, 2018
1 parent c0057d0 commit 0fa1c93
Showing 2 changed files with 13 additions and 6 deletions.
1 change: 1 addition & 0 deletions tests/dags/.airflowignore
@@ -0,0 +1 @@
+.*_invalid.*
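
As an illustration (plain Python, not Airflow code): each .airflowignore line is treated
as a regular expression matched against file paths, so this single pattern is enough to
skip both known-bad files while leaving the rest alone:

    import re

    # The single pattern added in tests/dags/.airflowignore
    pattern = re.compile(r'.*_invalid.*')

    for name in ('test_invalid_cron.py', 'test_zip_invalid_cron.zip', 'no_dags.py'):
        print(name, '-> ignored' if pattern.search(name) else '-> loaded')
    # test_invalid_cron.py -> ignored
    # test_zip_invalid_cron.zip -> ignored
    # no_dags.py -> loaded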
18 changes: 12 additions & 6 deletions tests/jobs.py
@@ -3319,16 +3319,22 @@ def test_list_py_file_paths(self):
         [JIRA-1357] Test the 'list_py_file_paths' function used by the
         scheduler to list and load DAGs.
         """
-        detected_files = []
-        expected_files = []
+        detected_files = set()
+        expected_files = set()
+        # No_dags is empty, _invalid_ is ignored by .airflowignore
+        ignored_files = [
+            'no_dags.py',
+            'test_invalid_cron.py',
+            'test_zip_invalid_cron.zip',
+        ]
         for file_name in os.listdir(TEST_DAGS_FOLDER):
             if file_name.endswith('.py') or file_name.endswith('.zip'):
-                if file_name not in ['no_dags.py']:
-                    expected_files.append(
+                if file_name not in ignored_files:
+                    expected_files.add(
                         '{}/{}'.format(TEST_DAGS_FOLDER, file_name))
         for file_path in list_py_file_paths(TEST_DAGS_FOLDER):
-            detected_files.append(file_path)
-        self.assertEqual(sorted(detected_files), sorted(expected_files))
+            detected_files.add(file_path)
+        self.assertEqual(detected_files, expected_files)
 
     def test_reset_orphaned_tasks_nothing(self):
         """Try with nothing. """
