Skip to content

Commit

Permalink
Merge pull request #645 from effigies/mnt/refurb
Browse files Browse the repository at this point in the history
STY: Miscellaneous cleanups
  • Loading branch information
effigies authored May 3, 2023
2 parents 64bfa89 + 4da0a35 commit 426564e
Show file tree
Hide file tree
Showing 30 changed files with 191 additions and 204 deletions.
4 changes: 2 additions & 2 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,12 @@ docs_deploy: &docs
name: Deploy docs to gh-pages branch
command: gh-pages --no-history --dotfiles --message "doc(update) [skip ci]" --dist docs/_build/html

version: 2
version: 2.1
jobs:

build_docs:
docker:
- image: python:3.7.4
- image: cimg/python:3.11
working_directory: /tmp/gh-pages
environment:
- FSLOUTPUTTYPE: NIFTI
Expand Down
2 changes: 1 addition & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@
napoleon_custom_sections = [("Inputs", "Parameters"), ("Outputs", "Parameters")]

# Intersphinx
intersphinx_mapping = {"https://docs.python.org/3/": None}
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

# Linkcode
# The following is used by sphinx.ext.linkcode to provide links to github
Expand Down
23 changes: 16 additions & 7 deletions pydra/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,28 @@

import logging

logger = logging.getLogger("pydra")
import __main__
import attr

from . import mark
from .engine import AuditFlag, DockerTask, ShellCommandTask, Submitter, Workflow, specs

__all__ = (
"Submitter",
"Workflow",
"AuditFlag",
"ShellCommandTask",
"DockerTask",
"specs",
"mark",
)

try:
from ._version import __version__
except ImportError:
pass

from .engine import Submitter, Workflow, AuditFlag, ShellCommandTask, DockerTask, specs
from . import mark
logger = logging.getLogger("pydra")


def check_latest_version():
Expand All @@ -29,8 +42,6 @@ def check_latest_version():


# Run telemetry on import for interactive sessions, such as IPython, Jupyter notebooks, Python REPL
import __main__

if not hasattr(__main__, "__file__"):
from .engine.core import TaskBase

Expand All @@ -39,8 +50,6 @@ def check_latest_version():


# attr run_validators is set to False, but could be changed using use_validator
import attr

attr.set_run_validators(False)


Expand Down
11 changes: 7 additions & 4 deletions pydra/engine/audit.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
"""Module to keep track of provenance information."""
import os
from pathlib import Path
import json
import attr
from ..utils.messenger import send_message, make_message, gen_uuid, now, AuditFlag
from .helpers import ensure_list, gather_runtime_info, hash_file
from .specs import attr_fields, File, Directory

try:
import importlib_resources
except ImportError:
import importlib.resources as importlib_resources # type: ignore


class Audit:
"""Handle provenance tracking and resource utilization."""
Expand Down Expand Up @@ -133,9 +137,8 @@ def audit_message(self, message, flags=None):
"""
if self.develop:
with open(
Path(os.path.dirname(__file__)) / ".." / "schema/context.jsonld"
) as fp:
context_file = importlib_resources.files("pydra") / "schema/context.jsonld"
with context_file.open() as fp:
context = json.load(fp)
else:
context = {
Expand Down
6 changes: 3 additions & 3 deletions pydra/engine/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,7 @@ def _run(self, rerun=False, **kwargs):
lockfile = self.cache_dir / (checksum + ".lock")
# Eagerly retrieve cached - see scenarios in __init__()
self.hooks.pre_run(self)
logger.debug(f"'%s' is attempting to acquire lock on %s", self.name, lockfile)
logger.debug("'%s' is attempting to acquire lock on %s", self.name, lockfile)
with SoftFileLock(lockfile):
if not (rerun or self.task_rerun):
result = self.result()
Expand Down Expand Up @@ -1105,7 +1105,7 @@ async def _run(self, submitter=None, rerun=False, **kwargs):
lockfile = self.cache_dir / (checksum + ".lock")
self.hooks.pre_run(self)
logger.debug(
f"'%s' is attempting to acquire lock on %s with Pydra lock",
"'%s' is attempting to acquire lock on %s with Pydra lock",
self.name,
lockfile,
)
Expand Down Expand Up @@ -1231,7 +1231,7 @@ def _collect_outputs(self):
err_file = getattr(self, val.name).output_dir / "_error.pklz"
raise ValueError(
f"Task {val.name} raised an error, full crash report is here: "
f"{str(err_file)}"
f"{err_file}"
)
return attr.evolve(output, **output_wf)

Expand Down
4 changes: 2 additions & 2 deletions pydra/engine/helpers_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -606,7 +606,7 @@ def template_update(inputs, output_dir, state_ind=None, map_copyfiles=None):
for fld in fields_templ:
if fld.type not in [str, ty.Union[str, bool]]:
raise Exception(
f"fields with output_file_template"
"fields with output_file_template"
" has to be a string or Union[str, bool]"
)
dict_mod[fld.name] = template_update_single(
Expand Down Expand Up @@ -638,7 +638,7 @@ def template_update_single(
if spec_type == "input":
if field.type not in [str, ty.Union[str, bool]]:
raise Exception(
f"fields with output_file_template"
"fields with output_file_template"
"has to be a string or Union[str, bool]"
)
inp_val_set = inputs_dict_st[field.name]
Expand Down
2 changes: 1 addition & 1 deletion pydra/engine/helpers_state.py
Original file line number Diff line number Diff line change
Expand Up @@ -344,7 +344,7 @@ def _add_name(mlist, name):
if "." in elem or elem.startswith("_"):
pass
else:
mlist[i] = "{}.{}".format(name, mlist[i])
mlist[i] = f"{name}.{mlist[i]}"
elif isinstance(elem, list):
mlist[i] = _add_name(elem, name)
elif isinstance(elem, tuple):
Expand Down
12 changes: 5 additions & 7 deletions pydra/engine/state.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,13 +454,11 @@ def _remove_repeated(self, previous_splitters):
f"{self.other_states}"
)

repeated = set(
[
(el, previous_splitters.count(el))
for el in previous_splitters
if previous_splitters.count(el) > 1
]
)
repeated = {
(el, previous_splitters.count(el))
for el in previous_splitters
if previous_splitters.count(el) > 1
}
if repeated:
# assuming that I want to remove from right
previous_splitters.reverse()
Expand Down
23 changes: 23 additions & 0 deletions pydra/engine/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import pytest
from pydra import set_input_validator

try:
import importlib_resources
except ImportError:
import importlib.resources as importlib_resources


@pytest.fixture(scope="package")
def data_tests_dir():
    """Yield a concrete filesystem path to the packaged test-data directory.

    Resolves ``pydra/engine/tests/data_tests`` via importlib resources and
    materializes it with :func:`as_file`, so the path is valid even if the
    package is not installed as a plain directory (e.g. zipped).  The
    ``package`` scope keeps the materialized path alive for all tests in the
    package rather than re-extracting per test.
    """
    test_nii = importlib_resources.files("pydra").joinpath(
        "engine", "tests", "data_tests"
    )
    with importlib_resources.as_file(test_nii) as path:
        # Context manager stays open across the yield so the path remains
        # valid for the duration of the fixture's (package) scope.
        yield path


@pytest.fixture()
def use_validator():
    """Enable pydra input validation for a single test.

    Turns the validator on before the test runs and switches it back off
    afterwards, so tests that do not request this fixture keep the default
    (disabled) behavior.  Function-scoped: the flag is toggled per test.
    """
    set_input_validator(flag=True)
    yield None
    # Teardown: restore the disabled-validation default for other tests.
    set_input_validator(flag=False)
3 changes: 0 additions & 3 deletions pydra/engine/tests/data_tests/loading.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import os


def loading(filename):
with open(filename) as f:
txt = f.read()
Expand Down
3 changes: 0 additions & 3 deletions pydra/engine/tests/data_tests/saving.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
import os


def saving(filename):
with open(filename, "w") as f:
f.write("Hello!")
Expand Down
26 changes: 11 additions & 15 deletions pydra/engine/tests/test_boutiques.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os, shutil
import shutil
import subprocess as sp
from pathlib import Path
import attr
import pytest

Expand All @@ -9,7 +8,6 @@
from ..submitter import Submitter
from ..boutiques import BoshTask
from .utils import result_no_submitter, result_submitter, no_win
from ...engine.specs import File

need_bosh_docker = pytest.mark.skipif(
shutil.which("docker") is None
Expand All @@ -18,8 +16,6 @@
reason="requires docker and bosh",
)

Infile = Path(__file__).resolve().parent / "data_tests" / "test.nii.gz"

pytestmark = pytest.mark.skip()


Expand All @@ -30,10 +26,10 @@
"maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
)
@pytest.mark.parametrize("results_function", [result_no_submitter, result_submitter])
def test_boutiques_1(maskfile, plugin, results_function, tmpdir):
def test_boutiques_1(maskfile, plugin, results_function, tmpdir, data_tests_dir):
"""simple task to run fsl.bet using BoshTask"""
btask = BoshTask(name="NA", zenodo_id="1482743")
btask.inputs.infile = Infile
btask.inputs.infile = data_tests_dir / "test.nii.gz"
btask.inputs.maskfile = maskfile
btask.cache_dir = tmpdir
res = results_function(btask, plugin)
Expand All @@ -50,12 +46,12 @@ def test_boutiques_1(maskfile, plugin, results_function, tmpdir):
@no_win
@need_bosh_docker
@pytest.mark.flaky(reruns=3)
def test_boutiques_spec_1():
def test_boutiques_spec_1(data_tests_dir):
"""testing spec: providing input/output fields names"""
btask = BoshTask(
name="NA",
zenodo_id="1482743",
infile=Infile,
infile=data_tests_dir / "test.nii.gz",
maskfile="test_brain.nii.gz",
input_spec_names=["infile", "maskfile"],
output_spec_names=["outfile", "out_outskin_off"],
Expand All @@ -75,12 +71,12 @@ def test_boutiques_spec_1():
@no_win
@need_bosh_docker
@pytest.mark.flaky(reruns=3)
def test_boutiques_spec_2():
def test_boutiques_spec_2(data_tests_dir):
"""testing spec: providing partial input/output fields names"""
btask = BoshTask(
name="NA",
zenodo_id="1482743",
infile=Infile,
infile=data_tests_dir / "test.nii.gz",
maskfile="test_brain.nii.gz",
input_spec_names=["infile"],
output_spec_names=[],
Expand All @@ -101,11 +97,11 @@ def test_boutiques_spec_2():
@pytest.mark.parametrize(
"maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
)
def test_boutiques_wf_1(maskfile, plugin, tmpdir):
def test_boutiques_wf_1(maskfile, plugin, tmpdir, infile):
"""wf with one task that runs fsl.bet using BoshTask"""
wf = Workflow(name="wf", input_spec=["maskfile", "infile"])
wf.inputs.maskfile = maskfile
wf.inputs.infile = Infile
wf.inputs.infile = infile
wf.cache_dir = tmpdir

wf.add(
Expand Down Expand Up @@ -134,11 +130,11 @@ def test_boutiques_wf_1(maskfile, plugin, tmpdir):
@pytest.mark.parametrize(
"maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
)
def test_boutiques_wf_2(maskfile, plugin, tmpdir):
def test_boutiques_wf_2(maskfile, plugin, tmpdir, infile):
"""wf with two BoshTasks (fsl.bet and fsl.stats) and one ShellTask"""
wf = Workflow(name="wf", input_spec=["maskfile", "infile"])
wf.inputs.maskfile = maskfile
wf.inputs.infile = Infile
wf.inputs.infile = infile
wf.cache_dir = tmpdir

wf.add(
Expand Down
27 changes: 13 additions & 14 deletions pydra/engine/tests/test_dockertask.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import os
import pytest
import attr

Expand Down Expand Up @@ -71,9 +70,7 @@ def test_docker_1_dockerflag_exception(plugin):
"""using ShellComandTask with container_info=("docker"), no image provided"""
cmd = "whoami"
with pytest.raises(Exception) as excinfo:
shocky = ShellCommandTask(
name="shocky", executable=cmd, container_info=("docker")
)
ShellCommandTask(name="shocky", executable=cmd, container_info=("docker"))
assert "container_info has to have 2 elements" in str(excinfo.value)


Expand Down Expand Up @@ -509,19 +506,17 @@ def test_wf_docker_1_dockerflag(plugin, tmpdir):
@no_win
@need_docker
@pytest.mark.skip(reason="we probably don't want to support bindings as an input")
def test_wf_docker_2pre(plugin, tmpdir):
def test_wf_docker_2pre(plugin, tmpdir, data_tests_dir):
"""a workflow with two connected task that run python scripts
the first one creates a text file and the second one reads the file
"""

scripts_dir = os.path.join(os.path.dirname(__file__), "data_tests")

cmd1 = ["python", "/scripts/saving.py", "-f", "/outputs/tmp.txt"]
dt = DockerTask(
name="save",
image="python:3.7-alpine",
executable=cmd1,
bindings=[(str(tmpdir), "/outputs"), (scripts_dir, "/scripts", "ro")],
bindings=[(str(tmpdir), "/outputs"), (str(data_tests_dir), "/scripts", "ro")],
strip=True,
)
res = dt(plugin=plugin)
Expand All @@ -531,13 +526,11 @@ def test_wf_docker_2pre(plugin, tmpdir):
@no_win
@need_docker
@pytest.mark.skip(reason="we probably don't want to support bindings as an input")
def test_wf_docker_2(plugin, tmpdir):
def test_wf_docker_2(plugin, tmpdir, data_tests_dir):
"""a workflow with two connected task that run python scripts
the first one creates a text file and the second one reads the file
"""

scripts_dir = os.path.join(os.path.dirname(__file__), "data_tests")

wf = Workflow(name="wf", input_spec=["cmd1", "cmd2"])
wf.inputs.cmd1 = ["python", "/scripts/saving.py", "-f", "/outputs/tmp.txt"]
wf.inputs.cmd2 = ["python", "/scripts/loading.py", "-f"]
Expand All @@ -546,7 +539,10 @@ def test_wf_docker_2(plugin, tmpdir):
name="save",
image="python:3.7-alpine",
executable=wf.lzin.cmd1,
bindings=[(str(tmpdir), "/outputs"), (scripts_dir, "/scripts", "ro")],
bindings=[
(str(tmpdir), "/outputs"),
(str(data_tests_dir), "/scripts", "ro"),
],
strip=True,
)
)
Expand All @@ -556,7 +552,10 @@ def test_wf_docker_2(plugin, tmpdir):
image="python:3.7-alpine",
executable=wf.lzin.cmd2,
args=wf.save.lzout.stdout,
bindings=[(str(tmpdir), "/outputs"), (scripts_dir, "/scripts", "ro")],
bindings=[
(str(tmpdir), "/outputs"),
(str(data_tests_dir), "/scripts", "ro"),
],
strip=True,
)
)
Expand Down Expand Up @@ -1078,7 +1077,7 @@ def test_docker_inputspec_3a(plugin, tmpdir):
)

with pytest.raises(Exception) as excinfo:
res = docky()
docky()
assert "use field.metadata['container_path']=True" in str(excinfo.value)


Expand Down
Loading

0 comments on commit 426564e

Please sign in to comment.