Enable CI on GitHub
Add a GitHub workflow `.github/workflows/ci.yml` that triggers on
pushes and pull requests. It runs the five jobs it defines (a minimal
sketch follows the list):

 * `conda`: installs conda and creates the environment
 * `docs`: builds the documentation with nitpick warnings
 * `pre-commit`: runs pre-commit on all files
 * `tests`: runs `verdi devel tests` and the stand-alone test files
 * `verdi`: runs the tests that check that the load time is acceptable
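
For reference, a minimal sketch of the shape the workflow could take.
The job names come from the list above, but the trigger syntax and the
steps shown (checkout, Python setup, the pre-commit invocation) are
illustrative assumptions, not the literal contents of the file:

```yaml
# Hypothetical sketch, not the actual .github/workflows/ci.yml.
# Only the `pre-commit` job is spelled out; the other four jobs
# (`conda`, `docs`, `tests`, `verdi`) would follow the same pattern.
name: aiida-core

on: [push, pull_request]

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - uses: actions/setup-python@v1
      with:
        python-version: '3.7'
    - name: Install pre-commit
      run: pip install pre-commit
    - name: Run pre-commit
      run: pre-commit run --all-files
```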

All jobs run on Python 3.7, except the `tests` job, which also runs on
Python 3.5. Both Python versions are combined with both database
backends in a matrix strategy, as sketched below. Even though we
support Python 3.6 as well, we do not test it explicitly, since that
would require another two builds, and testing 3.5 and 3.7 should give
decent guarantees.
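
For the `tests` job, that matrix could look roughly as follows. This is
a sketch under stated assumptions: the backend names `django` and
`sqlalchemy` are taken to be the two supported database backends, and
the single step shown is only illustrative:

```yaml
# Hypothetical sketch of the matrix strategy for the `tests` job:
# 2 Python versions x 2 database backends = 4 parallel builds.
tests:
  runs-on: ubuntu-latest
  strategy:
    matrix:
      python-version: ['3.5', '3.7']
      backend: ['django', 'sqlalchemy']
  steps:
  - uses: actions/setup-python@v1
    with:
      python-version: ${{ matrix.python-version }}
```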

Finally, the argument for multiple individual jobs instead of joining
them into one is that, as of this writing, GitHub does not seem to
limit the number of concurrently running jobs. Spreading the work over
separate jobs therefore lets them run in parallel, which should reduce
the overall runtime of the continuous integration workflow.
sphuber committed Dec 3, 2019
1 parent 0644e7d commit 81d736c
Showing 23 changed files with 381 additions and 84 deletions.
157 changes: 88 additions & 69 deletions .ci/test_daemon.py
@@ -7,47 +7,47 @@
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################

# pylint: disable=no-name-in-module
"""Tests to run with a running daemon."""
import subprocess
import sys
import time


from aiida.common import exceptions
from aiida.engine import run_get_node, submit
from aiida.engine.daemon.client import get_daemon_client
from aiida.engine.persistence import ObjectLoader
from aiida.manage.caching import enable_caching
from aiida.orm import CalcJobNode, Code, load_node, Int, Str, List
from aiida.plugins import CalculationFactory, DataFactory
from aiida.orm import CalcJobNode, load_node, Int, Str, List, Dict, load_code
from aiida.plugins import CalculationFactory
from workchains import (
NestedWorkChain, DynamicNonDbInput, DynamicDbInput, DynamicMixedInput, ListEcho, CalcFunctionRunnerWorkChain,
WorkFunctionRunnerWorkChain, NestedInputNamespace, SerializeWorkChain
)


Dict = DataFactory('dict')

codename = 'doubler@torquessh'
timeout_secs = 4 * 60 # 4 minutes
number_calculations = 15 # Number of calculations to submit
number_workchains = 8 # Number of workchains to submit
CODENAME = 'doubler'
TIMEOUTSECS = 4 * 60 # 4 minutes
NUMBER_CALCULATIONS = 15 # Number of calculations to submit
NUMBER_WORKCHAINS = 8 # Number of workchains to submit


def print_daemon_log():
"""Print daemon log."""
daemon_client = get_daemon_client()
daemon_log = daemon_client.daemon_log_file

print("Output of 'cat {}':".format(daemon_log))
try:
print(subprocess.check_output(
['cat', '{}'.format(daemon_log)], stderr=subprocess.STDOUT,
['cat', '{}'.format(daemon_log)],
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as e:
print('Note: the command failed, message: {}'.format(e))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))


def jobs_have_finished(pks):
"""Check if jobs with given pks have finished."""
finished_list = [load_node(pk).is_terminated for pk in pks]
node_list = [load_node(pk) for pk in pks]
num_finished = len([_ for _ in finished_list if _])
@@ -56,10 +56,11 @@ def jobs_have_finished(pks):
if not node.is_terminated:
print('not terminated: {} [{}]'.format(node.pk, node.process_state))
print('{}/{} finished'.format(num_finished, len(finished_list)))
return not (False in finished_list)
return False not in finished_list


def print_report(pk):
"""Print the process report for given pk."""
print("Output of 'verdi process report {}':".format(pk))
try:
print(subprocess.check_output(
@@ -71,13 +72,17 @@


def validate_calculations(expected_results):
"""Validate the calculations."""
valid = True
actual_dict = {}
for pk, expected_dict in expected_results.items():
calc = load_node(pk)
if not calc.is_finished_ok:
print('Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'
.format(pk, calc.process_state, calc.exit_status))
print(
'Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format(
pk, calc.process_state, calc.exit_status
)
)
print_report(pk)
valid = False

@@ -95,39 +100,48 @@ def validate_calculations(expected_results):
pass

if actual_dict != expected_dict:
print('* UNEXPECTED VALUE {} for calc pk={}: I expected {}'
.format(actual_dict, pk, expected_dict))
print('* UNEXPECTED VALUE {} for calc pk={}: I expected {}'.format(actual_dict, pk, expected_dict))
valid = False

return valid


def validate_workchains(expected_results):
"""Validate the workchains."""
valid = True
for pk, expected_value in expected_results.items():
this_valid = True
try:
calc = load_node(pk)
actual_value = calc.outputs.output
except (exceptions.NotExistent, AttributeError) as exception:
print('* UNABLE TO RETRIEVE VALUE for workchain pk={}: I expected {}, I got {}: {}'
.format(pk, expected_value, type(exception), exception))
print(
'* UNABLE TO RETRIEVE VALUE for workchain pk={}: I expected {}, I got {}: {}'.format(
pk, expected_value, type(exception), exception
)
)
valid = False
this_valid = False
actual_value = None

# I check only if this_valid, otherwise calc could not exist
if this_valid and not calc.is_finished_ok:
print('Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'
.format(pk, calc.process_state, calc.exit_status))
print(
'Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format(
pk, calc.process_state, calc.exit_status
)
)
print_report(pk)
valid = False
this_valid = False

# I check only if this_valid, otherwise actual_value could be unset
if this_valid and actual_value != expected_value:
print('* UNEXPECTED VALUE {}, type {} for workchain pk={}: I expected {}, type {}'
.format(actual_value, type(actual_value), pk, expected_value, type(expected_value)))
print(
'* UNEXPECTED VALUE {}, type {} for workchain pk={}: I expected {}, type {}'.format(
actual_value, type(actual_value), pk, expected_value, type(expected_value)
)
)
valid = False
this_valid = False

@@ -142,8 +156,11 @@ def validate_cached(cached_calcs):
for calc in cached_calcs:

if not calc.is_finished_ok:
print('Cached calculation<{}> not finished ok: process_state<{}> exit_status<{}>'
.format(calc.pk, calc.process_state, calc.exit_status))
print(
'Cached calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format(
calc.pk, calc.process_state, calc.exit_status
)
)
print_report(calc.pk)
valid = False

@@ -162,13 +179,19 @@ def validate_cached(cached_calcs):
print_report(calc.pk)
valid = False
if not files_original:
print('Original calculation <{}> does not have any raw inputs files after being cached from.'
.format(original_calc.pk))
print(
'Original calculation <{}> does not have any raw inputs files after being cached from.'.format(
original_calc.pk
)
)
valid = False

if set(files_original) != set(files_cached):
print('different raw input files [{}] vs [{}] for original<{}> and cached<{}> calculation'.format(
set(files_original), set(files_cached), original_calc.pk, calc.pk))
print(
'different raw input files [{}] vs [{}] for original<{}> and cached<{}> calculation'.format(
set(files_original), set(files_cached), original_calc.pk, calc.pk
)
)
valid = False

return valid
@@ -189,7 +212,7 @@ def run_calculation(code, counter, inputval):
Run a calculation through the Process layer.
"""
process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
result, calc = run_get_node(process, **inputs)
_, calc = run_get_node(process, **inputs)
print('[{}] ran calculation {}, pk={}'.format(counter, calc.uuid, calc.pk))
return calc, expected_result

@@ -200,19 +223,21 @@ def create_calculation_process(code, inputval):
"""
TemplatereplacerCalculation = CalculationFactory('templatereplacer')
parameters = Dict(dict={'value': inputval})
template = Dict(dict={
# The following line adds a significant sleep time.
# I set it to 1 second to speed up tests
# I keep it to a non-zero value because I want
# To test the case when AiiDA finds some calcs
# in a queued state
# 'cmdline_params': ["{}".format(counter % 3)], # Sleep time
'cmdline_params': ['1'],
'input_file_template': '{value}', # File just contains the value to double
'input_file_name': 'value_to_double.txt',
'output_file_name': 'output.txt',
'retrieve_temporary_files': ['triple_value.tmp']
})
template = Dict(
dict={
# The following line adds a significant sleep time.
# I set it to 1 second to speed up tests
# I keep it to a non-zero value because I want
# To test the case when AiiDA finds some calcs
# in a queued state
# 'cmdline_params': ["{}".format(counter % 3)], # Sleep time
'cmdline_params': ['1'],
'input_file_template': '{value}', # File just contains the value to double
'input_file_name': 'value_to_double.txt',
'output_file_name': 'output.txt',
'retrieve_temporary_files': ['triple_value.tmp']
}
)
options = {
'resources': {
'num_machines': 1
@@ -222,12 +247,7 @@ def create_calculation_process(code, inputval):
'parser_name': 'templatereplacer.doubler',
}

expected_result = {
'value': 2 * inputval,
'retrieved_temporary_files': {
'triple_value.tmp': str(inputval * 3)
}
}
expected_result = {'value': 2 * inputval, 'retrieved_temporary_files': {'triple_value.tmp': str(inputval * 3)}}

inputs = {
'code': code,
@@ -241,24 +261,24 @@


def main():
"""Launch a bunch of calculation jobs and workchains."""
# pylint: disable=too-many-locals,too-many-statements
expected_results_calculations = {}
expected_results_workchains = {}
code = Code.get_from_string(codename)
code = load_code(CODENAME)

# Submitting the Calculations the new way directly through the launchers
print('Submitting {} calculations to the daemon'.format(number_calculations))
for counter in range(1, number_calculations + 1):
print('Submitting {} calculations to the daemon'.format(NUMBER_CALCULATIONS))
for counter in range(1, NUMBER_CALCULATIONS + 1):
inputval = counter
calc, expected_result = launch_calculation(
code=code, counter=counter, inputval=inputval
)
calc, expected_result = launch_calculation(code=code, counter=counter, inputval=inputval)
expected_results_calculations[calc.pk] = expected_result

# Submitting the Workchains
print('Submitting {} workchains to the daemon'.format(number_workchains))
for index in range(number_workchains):
print('Submitting {} workchains to the daemon'.format(NUMBER_WORKCHAINS))
for index in range(NUMBER_WORKCHAINS):
inp = Int(index)
result, node = run_get_node(NestedWorkChain, inp=inp)
_, node = run_get_node(NestedWorkChain, inp=inp)
expected_results_workchains[node.pk] = index

print("Submitting a workchain with 'submit'.")
@@ -315,7 +335,7 @@ def main():
print('Waiting for end of execution...')
start_time = time.time()
exited_with_timeout = True
while time.time() - start_time < timeout_secs:
while time.time() - start_time < TIMEOUTSECS:
time.sleep(15) # Wait a few seconds

# Print some debug info, both for debugging reasons and to avoid
@@ -330,17 +350,17 @@
['verdi', 'process', 'list', '-a'],
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as e:
print('Note: the command failed, message: {}'.format(e))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))

print("Output of 'verdi daemon status':")
try:
print(subprocess.check_output(
['verdi', 'daemon', 'status'],
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as e:
print('Note: the command failed, message: {}'.format(e))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))

if jobs_have_finished(pks):
print('Calculation terminated its execution')
Expand All @@ -350,22 +370,21 @@ def main():
if exited_with_timeout:
print_daemon_log()
print('')
print('Timeout!! Calculation did not complete after {} seconds'.format(timeout_secs))
print('Timeout!! Calculation did not complete after {} seconds'.format(TIMEOUTSECS))
sys.exit(2)
else:
# Launch the same calculations but with caching enabled -- these should be FINISHED immediately
cached_calcs = []
with enable_caching(identifier='aiida.calculations:templatereplacer'):
for counter in range(1, number_calculations + 1):
for counter in range(1, NUMBER_CALCULATIONS + 1):
inputval = counter
calc, expected_result = run_calculation(code=code, counter=counter, inputval=inputval)
cached_calcs.append(calc)
expected_results_calculations[calc.pk] = expected_result

if (
validate_calculations(expected_results_calculations) and
validate_workchains(expected_results_workchains) and
validate_cached(cached_calcs)
validate_workchains(expected_results_workchains) and validate_cached(cached_calcs)
):
print_daemon_log()
print('')
9 changes: 9 additions & 0 deletions .github/config/doubler.yaml
@@ -0,0 +1,9 @@
---
label: doubler
description: doubler
input_plugin: templatereplacer
on_computer: true
computer: localhost
remote_abs_path: PLACEHOLDER_REMOTE_ABS_PATH
prepend_text: ' '
append_text: ' '
2 changes: 2 additions & 0 deletions .github/config/localhost-config.yaml
@@ -0,0 +1,2 @@
---
safe_interval: 0
12 changes: 12 additions & 0 deletions .github/config/localhost.yaml
@@ -0,0 +1,12 @@
---
label: localhost
description: localhost
hostname: localhost
transport: local
scheduler: direct
shebang: '#!/usr/bin/env bash'
work_dir: PLACEHOLDER_WORK_DIR
mpirun_command: ' '
mpiprocs_per_machine: 1
prepend_text: ' '
append_text: ' '
14 changes: 14 additions & 0 deletions .github/config/profile.yaml
@@ -0,0 +1,14 @@
---
profile: PLACEHOLDER_PROFILE
email: aiida@localhost
first_name: Giuseppe
last_name: Verdi
institution: Khedivial
db_backend: PLACEHOLDER_BACKEND
db_engine: postgresql_psycopg2
db_host: localhost
db_port: 5432
db_name: PLACEHOLDER_DATABASE_NAME
db_username: postgres
db_password: ''
repository: PLACEHOLDER_REPOSITORY
