From 551ea193a36f8ffb4a7cfb22269bc3596e9aa190 Mon Sep 17 00:00:00 2001
From: skchronicles
Date: Wed, 12 Jul 2023 15:45:20 -0400
Subject: [PATCH] Adding pipeline hooks to get SLURM job information

---
 workflow/Snakefile       |  3 +-
 workflow/rules/hooks.smk | 79 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 81 insertions(+), 1 deletion(-)
 create mode 100644 workflow/rules/hooks.smk

diff --git a/workflow/Snakefile b/workflow/Snakefile
index 70caed3..f1f7d4e 100644
--- a/workflow/Snakefile
+++ b/workflow/Snakefile
@@ -41,6 +41,7 @@ with open(join('config', 'cluster.json')) as fh:
 
 # Imported rules
 include: join("rules", "common.smk")
+include: join("rules", "hooks.smk")
 include: join("rules", "paired-end.smk")
 
 # Targets of the pipeline to build the DAG,
@@ -132,4 +133,4 @@ rule all:
         provided(
             [join(workpath,"Project","Project.contig.classification.html")],
             do_aggregate
-        )
\ No newline at end of file
+        )
diff --git a/workflow/rules/hooks.smk b/workflow/rules/hooks.smk
new file mode 100644
index 0000000..1df69c1
--- /dev/null
+++ b/workflow/rules/hooks.smk
@@ -0,0 +1,79 @@
+# Adding handlers for displaying status of the
+# pipeline and for getting job information for
+# previously submitted jobs using `jobby`:
+# https://github.com/OpenOmics/scribble/blob/main/scripts/jobby/jobby
+if config['options']['mode'] == 'slurm':
+    onstart:
+        shell(
+            """
+            # Move any job information for a previous
+            # instance of the pipeline to logfiles
+            sleep 5; rm -f COMPLETED FAILED RUNNING;
+            touch RUNNING
+            for f in job_information_*.tsv; do
+                # Skip over non-existent files
+                [ -e "${{f}}" ] || continue
+                mv ${{f}} logfiles/;
+            done
+
+            for f in failed_jobs_*.tsv; do
+                # Skip over non-existent files
+                [ -e "${{f}}" ] || continue
+                mv ${{f}} logfiles/;
+            done
+            """
+        )
+
+    onsuccess:
+        shell(
+            """
+            # Get job information on all
+            # previously submitted jobs
+            sleep 15; rm -f COMPLETED FAILED RUNNING;
+            timestamp=$(date +"%Y-%m-%d_%H-%M-%S");
+            ./workflow/scripts/jobby \\
+                $(grep --color=never "^Submitted .* external jobid" logfiles/snakemake.log \\
+                    | awk '{{print $NF}}' \\
+                    | sed "s/['.]//g" \\
+                    | sort \\
+                    | uniq \\
+                    | tr "\\n" " "
+                ) \\
+            > job_information_${{timestamp}}.tsv
+
+            # Get information on any child
+            # job(s) that may have failed
+            grep --color=never \\
+                '^jobid\\|FAILED' \\
+                job_information_${{timestamp}}.tsv \\
+            > failed_jobs_${{timestamp}}.tsv
+            touch COMPLETED
+            """
+        )
+
+    onerror:
+        shell(
+            """
+            # Get job information on all
+            # previously submitted jobs
+            sleep 15; rm -f COMPLETED FAILED RUNNING;
+            timestamp=$(date +"%Y-%m-%d_%H-%M-%S");
+            ./workflow/scripts/jobby \\
+                $(grep --color=never "^Submitted .* external jobid" logfiles/snakemake.log \\
+                    | awk '{{print $NF}}' \\
+                    | sed "s/['.]//g" \\
+                    | sort \\
+                    | uniq \\
+                    | tr "\\n" " "
+                ) \\
+            > job_information_${{timestamp}}.tsv
+
+            # Get information on any child
+            # job(s) that may have failed
+            grep --color=never \\
+                '^jobid\\|FAILED' \\
+                job_information_${{timestamp}}.tsv \\
+            > failed_jobs_${{timestamp}}.tsv
+            touch FAILED
+            """
+        )