-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #99 from CCBR/refactor-gui
Refactor the XAVIER GUI
- Loading branch information
Showing
14 changed files
with
691 additions
and
149 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
File renamed without changes.
0
config/genomes/hg38.frce.json → config/genomes/frce/hg38.json
100755 → 100644
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -44,137 +44,26 @@ | |
import argparse # potential python3 3rd party package, added in python/3.5 | ||
|
||
# Local imports | ||
from .run import init, setup, bind, dryrun, runner | ||
from .run import init, setup, bind, dryrun, runner, run | ||
from .shells import bash | ||
from .options import genome_options | ||
from .util import err, exists, fatal, permissions, check_cache, require, get_version | ||
from .util import ( | ||
err, | ||
exists, | ||
fatal, | ||
permissions, | ||
check_cache, | ||
require, | ||
get_version, | ||
get_genomes_list, | ||
) | ||
from .gui import launch_gui | ||
|
||
__version__ = get_version() | ||
__email__ = "[email protected]" | ||
__home__ = os.path.dirname(os.path.abspath(__file__)) | ||
|
||
|
||
def run(sub_args):
    """Initialize, setup, and run the XAVIER pipeline.

    Calls init() to create the output directory and copy over pipeline
    resources, setup() to create the pipeline config file, dryrun() to
    ensure there are no issues before running the pipeline, and finally
    runner() to execute the Snakemake workflow.

    @param sub_args <parser.parse_args() object>:
        Parsed arguments for run sub-command
    """
    # Step 0. Check for required dependencies.
    # The pipeline has only two requirements: snakemake and singularity.
    require(["snakemake", "singularity"], ["snakemake", "singularity"])

    # Optional Step. Initialize working directory and copy over the
    # resources required to run the pipeline.
    git_repo = __home__
    if sub_args.runmode == "init":
        print("--Initializing")
        input_files = init(
            repo_path=git_repo, output_path=sub_args.output, links=sub_args.input
        )

    # Determine whether a "nidap folder" should be created.
    create_nidap_folder_YN = "yes" if sub_args.create_nidap_folder else "no"

    # Required Step. Setup pipeline for execution: dynamically create the
    # config.json file from user inputs and the base config templates.
    config = setup(
        sub_args,
        repo_path=git_repo,
        output_path=sub_args.output,
        create_nidap_folder_YN=create_nidap_folder_YN,
        links=sub_args.input,
    )

    # Required Step. Resolve docker/singularity bind
    # paths from the config file.
    bindpaths = bind(sub_args, config=config)

    # Optional Step: Dry-run pipeline (always performed before a full run).
    if sub_args.runmode == "dryrun" or sub_args.runmode == "run":
        print("--Dry-Run")
        # dryrun() returns a byte-string representation of snakemake's output
        dryrun_output = dryrun(outdir=sub_args.output)
        print(
            "\nDry-running XAVIER pipeline:\n{}".format(dryrun_output.decode("utf-8"))
        )

    # Optional Step. Orchestrate pipeline execution: run the pipeline
    # locally on a compute node for debugging purposes, or submit the
    # master job to the job scheduler, SLURM, and create a logging file.
    if sub_args.runmode == "run":
        print("--Run full pipeline")
        logdir = os.path.join(sub_args.output, "logfiles")
        if not exists(logdir):
            # Create directory for logfiles
            os.makedirs(logdir)
        if sub_args.mode == "local":
            log = os.path.join(logdir, "snakemake.log")
        else:
            log = os.path.join(logdir, "master.log")
        wait = "--wait" if sub_args.wait else ""
        # Context manager guarantees the log handle is closed even when
        # runner() or mjob.wait() raises (previously it leaked on error).
        with open(log, "w") as logfh:
            mjob = runner(
                mode=sub_args.mode,
                outdir=sub_args.output,
                # additional_bind_paths = all_bind_paths,
                alt_cache=sub_args.singularity_cache,
                threads=int(sub_args.threads),
                jobname=sub_args.job_name,
                submission_script="runner",
                logger=logfh,
                additional_bind_paths=",".join(bindpaths),
                tmp_dir=sub_args.tmp_dir,
                wait=wait,
            )

            # Step 5. Wait for subprocess to complete,
            # this is blocking and not asynchronous
            if not sub_args.silent:
                print("\nRunning XAVIER pipeline in '{}' mode...".format(sub_args.mode))
            mjob.wait()

        # Step 6. Relay information about submission of the master job,
        # or the exit code of the pipeline that ran in local mode.
        if sub_args.mode == "local":
            if int(mjob.returncode) == 0:
                print("XAVIER has successfully completed")
            else:
                fatal(
                    "XAVIER failed. Please see {} for more information.".format(
                        os.path.join(sub_args.output, "logfiles", "snakemake.log")
                    )
                )
        elif sub_args.mode == "slurm":
            # Read the master job id; 'with' ensures the handle is closed
            # (previously the file object was left open).
            with open(os.path.join(sub_args.output, "logfiles", "mjobid.log")) as fh:
                jobid = fh.read().strip()
            if not sub_args.silent:
                if int(mjob.returncode) == 0:
                    print("Successfully submitted master job: ", end="")
                else:
                    fatal("Error occurred when submitting the master job.")
            print(jobid)
|
||
|
||
def unlock(sub_args): | ||
"""Unlocks a previous runs output directory. If snakemake fails ungracefully, | ||
it maybe required to unlock the working directory before proceeding again. | ||
|
@@ -407,13 +296,19 @@ def parsed_arguments(): | |
# Suppressing help message of required args to overcome no sub-parser named groups | ||
subparser_run = subparsers.add_parser( | ||
"run", | ||
help="Run the XAVIER pipeline with input files.", | ||
help="Run the XAVIER pipeline with input files.", | ||
usage=argparse.SUPPRESS, | ||
formatter_class=argparse.RawDescriptionHelpFormatter, | ||
description=required_run_options, | ||
epilog=run_epilog, | ||
) | ||
|
||
subparser_gui = subparsers.add_parser( | ||
"gui", | ||
help="Launch the pipeline with a Graphical User Interface (GUI)", | ||
description="", | ||
) | ||
|
||
# Required Arguments | ||
# Input FastQ files | ||
subparser_run.add_argument( | ||
|
@@ -772,6 +667,7 @@ def parsed_arguments(): | |
subparser_run.set_defaults(func=run) | ||
subparser_unlock.set_defaults(func=unlock) | ||
subparser_cache.set_defaults(func=cache) | ||
subparser_gui.set_defaults(func=launch_gui) | ||
|
||
# Parse command-line args | ||
args = parser.parse_args() | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
import json | ||
import os | ||
import sys | ||
|
||
|
||
def get_singularity_cachedir(output_dir, cache_dir=None):
    """Return the singularity cache directory.

    When no user-provided cache directory is given, fall back to a
    hidden ".singularity" folder inside the output directory.
    """
    return cache_dir or os.path.join(output_dir, ".singularity")
|
||
|
||
def get_sif_cache_dir(hpc=None):
    """Return the shared SIF cache directory for a known HPC system.

    Unrecognized (or omitted) system names yield None.
    """
    # Dispatch table of known HPC systems -> their shared SIF caches.
    known_caches = {
        "biowulf": "/data/CCBR_Pipeliner/SIFS",
        "frce": "/mnt/projects/CCBR-Pipelines/SIFs",
    }
    return known_caches.get(hpc)
|
||
|
||
def image_cache(sub_args, config):
    """Adds Docker Image URIs, or SIF paths to config if singularity cache option is provided.

    If singularity cache option is provided and a local SIF does not exist, a warning is
    displayed and the image will be pulled from URI in 'config/containers/images.json'.

    @param sub_args <parser.parse_args() object>:
        Parsed arguments for run sub-command
    @params config <file>:
        Docker Image config file
    @return config <dict>:
        Updated config dictionary containing user information (username and home directory)
    """
    images = os.path.join(sub_args.output, "config", "containers", "images.json")

    # Read in config for docker image uris
    with open(images, "r") as handle:
        data = json.load(handle)

    # Only rewrite image pointers when a SIF cache was supplied;
    # the check is loop-invariant so it is hoisted out of the loop.
    if sub_args.sif_cache:
        for image, uri in data["images"].items():
            # Expected local SIF name, derived from the registry URI
            # (":" is not a legal filename character).
            sif = os.path.join(
                sub_args.sif_cache,
                "{}.sif".format(os.path.basename(uri).replace(":", "_")),
            )
            if os.path.exists(sif):
                # Change pointer to image from Registry URI to local SIF
                data["images"][image] = sif
            else:
                # Local SIF missing from the cache: warn and keep the URI
                # from config/containers/images.json as a fallback.
                print(
                    'Warning: Local image "{}" does not exist in singularity cache'.format(
                        sif
                    ),
                    file=sys.stderr,
                )

    config.update(data)

    return config
Oops, something went wrong.