fix: refactor CLI and IO modules for improved functionality and type safety

- Refactored CLI configuration to use `pydantic-settings` for better type safety and configuration management.
- Updated IO module to use a registration system for conversions between different mesh types, improving flexibility and maintainability.
- Enhanced type annotations and removed redundant code for better readability and maintainability.
- Added new modules for DICOM, Open3D, and PyVista IO operations, providing more robust support for various data formats.
- Fixed and improved various functions and methods across the codebase to ensure consistent behavior and type handling.

BREAKING CHANGE: The refactoring of the CLI and IO modules may require updates to existing scripts and configurations that rely on the previous implementation.
liblaf committed Sep 15, 2024
1 parent c809f1c commit 1846d7c
Showing 128 changed files with 3,166 additions and 1,463 deletions.
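
As a reading aid for the commit message above (not part of the diff): a minimal sketch of the two patterns it describes — a `pydantic-settings`-backed CLI config and a registration table for conversions between mesh types. The `CLIBaseConfig` name mirrors usage visible in the diffs below; `register`, `convert`, and the pydantic-settings options are assumptions for illustration, not the actual `mkit` API.

# Hedged sketch, assuming pydantic-settings >= 2.2; names not shown in the diffs are hypothetical.
from pathlib import Path
from typing import Callable, TypeVar

from pydantic_settings import BaseSettings, SettingsConfigDict

T = TypeVar("T")


class CLIBaseConfig(BaseSettings):
    # Typed CLI options: pydantic-settings validates values and can read them
    # from command-line arguments as well as environment variables.
    model_config = SettingsConfigDict(cli_parse_args=True)


class ExampleConfig(CLIBaseConfig):
    raw: Path
    output: Path


# A minimal converter registry: each (source type, target type) pair maps to a
# function, so support for a new mesh library plugs in without editing callers.
_CONVERTERS: dict[tuple[type, type], Callable] = {}


def register(src: type, dst: type) -> Callable[[Callable], Callable]:
    def decorator(fn: Callable) -> Callable:
        _CONVERTERS[(src, dst)] = fn
        return fn

    return decorator


def convert(obj: object, dst: type[T]) -> T:
    # Dispatch on the concrete source type and requested target type.
    return _CONVERTERS[(type(obj), dst)](obj)

Usage would look like `cfg = ExampleConfig()` to parse the command line and `convert(some_mesh, pv.PolyData)` to dispatch a registered conversion; the real modules presumably add error handling and subclass-aware lookup.
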
9 changes: 9 additions & 0 deletions .cspell.json
@@ -10,13 +10,18 @@
"confz",
"corotated",
"cuda",
"customise",
"cyipopt",
"cython",
"dcmread",
"denorm",
"dicom",
"disp",
"dmypy",
"dotenv",
"dvclive",
"elems",
"fpfh",
"frobenius",
"gltf",
"hstack",
@@ -53,6 +58,8 @@
"pvtu",
"pybuilder",
"pycache",
"pydantic",
"pydicom",
"pydocstyle",
"pyenv",
"pyflow",
@@ -67,7 +74,9 @@
"pytetwild",
"pytype",
"pyvista",
"pyyaml",
"qdwh",
"rint",
"rtol",
"scipy",
"scrapy",
8 changes: 6 additions & 2 deletions .ruff.toml
@@ -1,11 +1,15 @@
builtins = ["ic"]
fix = true
preview = true
show-fixes = true
target-version = "py311"

[format]
docstring-code-format = true

[lint]
explicit-preview-rules = true
extend-select = ["RUF022"]
ignore = [
"ANN",
"ARG",
@@ -16,10 +20,9 @@ ignore = [
"ERA",
"FIX",
"INP",
"ISC",
"N802",
"N803",
"N806",
"PD008",
"PLR09",
"PLR2004",
"RET504",
@@ -34,6 +37,7 @@ select = ["ALL"]
"jax.typing" = "jxt"
"numpy.typing" = "npt"
"pyvista" = "pv"
"subprocess" = "sp"
"taichi" = "ti"
"trimesh" = "tm"

43 changes: 21 additions & 22 deletions .vscode/settings.json
@@ -1,24 +1,23 @@
{
"files.exclude": {
"**/__pycache__/": true,
"**/.pixi/": true,
"**/.pytest_cache/": true,
"**/*.benchmarks/": true,
"**/*.cache/": true,
"**/*.egg-info/": true,
"**/*.ruff_cache/": true
},
"python.analysis.diagnosticMode": "workspace",
"python.analysis.include": ["src"],
"python.testing.pytestArgs": ["src/tests/"],
"python.testing.pytestEnabled": true,
"python.testing.unittestEnabled": false,
"yaml.customTags": [
"!ENV scalar",
"!ENV sequence",
"!relative scalar",
"tag:yaml.org,2002:python/name:material.extensions.emoji.to_svg",
"tag:yaml.org,2002:python/name:material.extensions.emoji.twemoji",
"tag:yaml.org,2002:python/name:pymdownx.superfences.fence_code_format"
]
"files.exclude": {
"**/__pycache__/": true,
"**/.pixi/": true,
"**/.pytest_cache/": true,
"**/*.benchmarks/": true,
"**/*.cache/": true,
"**/*.egg-info/": true,
"**/*.ruff_cache/": true
},
"python.analysis.diagnosticMode": "workspace",
"python.testing.pytestArgs": ["src/tests/"],
"python.testing.pytestEnabled": true,
"python.testing.unittestEnabled": false,
"yaml.customTags": [
"!ENV scalar",
"!ENV sequence",
"!relative scalar",
"tag:yaml.org,2002:python/name:material.extensions.emoji.to_svg",
"tag:yaml.org,2002:python/name:material.extensions.emoji.twemoji",
"tag:yaml.org,2002:python/name:pymdownx.superfences.fence_code_format"
]
}
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -16,7 +16,7 @@
import pyvista as pv


def E_nu_to_lambda_mu(E: float, nu: float) -> tuple[float, float]: # noqa: N802, N803
def E_nu_to_lambda_mu(E: float, nu: float) -> tuple[float, float]:
lambda_: float = E * nu / ((1 + nu) * (1 - 2 * nu))
mu: float = E / (2 * (1 + nu))
return lambda_, mu
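
A note for context, not part of the diff: `E_nu_to_lambda_mu` implements the standard conversion from Young's modulus E and Poisson's ratio ν to the Lamé parameters of isotropic linear elasticity,

\lambda = \frac{E \nu}{(1 + \nu)(1 - 2\nu)}, \qquad \mu = \frac{E}{2(1 + \nu)}

which matches the two return values in the function above.
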
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
1 change: 0 additions & 1 deletion exp/2024/09/01/real-data/src/prepare.py
@@ -3,7 +3,6 @@
import numpy as np
import pyvista as pv
from icecream import ic
from sympy import inverse_laplace_transform

import mkit
from mkit.typing import StrPath
3 changes: 0 additions & 3 deletions exp/2024/09/01/register/src/hello.py
@@ -1,7 +1,4 @@
from typing import Any

import numpy as np
import numpy.typing as npt
import pyvista as pv
from icecream import ic

90 changes: 90 additions & 0 deletions exp/2024/09/12/organize-ct/src/main.py
@@ -0,0 +1,90 @@
import collections
import concurrent.futures
import shutil
from pathlib import Path
from typing import Literal

import pydantic
import pydicom

import mkit


class CLIConfig(mkit.cli.CLIBaseConfig):
raw: Path
output: Path


class Acquisition(pydantic.BaseModel):
age: str
birth_date: str
date: str
id: str
name: str
sex: Literal["F", "M"]
time: str


class Patient(pydantic.BaseModel):
acquisitions: list[Acquisition]
birth_date: str
id: str
name: str
sex: Literal["F", "M"]


class Dataset(pydantic.BaseModel):
patients: dict[str, Patient]


def extract_meta(data: pydicom.FileDataset) -> Acquisition:
return Acquisition(
age=data["PatientAge"].value,
birth_date=data["PatientBirthDate"].value,
date=data["AcquisitionDate"].value,
id=data["PatientID"].value,
name=str(data["PatientName"].value),
sex=data["PatientSex"].value,
time=data["AcquisitionTime"].value,
)


def process_acquisition(dirfile_fpath: Path, output_dir: Path) -> Acquisition:
dirfile: pydicom.FileDataset = pydicom.dcmread(dirfile_fpath)
seq: pydicom.DataElement = dirfile["DirectoryRecordSequence"]
file_id: pydicom.DataElement = seq[0]["ReferencedFileID"]
record_fpath: Path = dirfile_fpath.with_name(file_id[-1])
record: pydicom.FileDataset = pydicom.dcmread(record_fpath)
meta: Acquisition = extract_meta(record)
ic(meta)
dpath: Path = output_dir / meta.id / meta.date
dpath.mkdir(parents=True, exist_ok=True)
shutil.copytree(dirfile_fpath.parent, dpath, dirs_exist_ok=True)
return meta


@mkit.cli.auto_run
def main(cfg: CLIConfig) -> None:
patients: dict[str, list[Acquisition]] = collections.defaultdict(list)
with concurrent.futures.ThreadPoolExecutor() as executor:
futures: list[concurrent.futures.Future[Acquisition]] = [
executor.submit(process_acquisition, fpath, cfg.output)
for fpath in cfg.raw.rglob("DIRFILE")
]
for future in concurrent.futures.as_completed(futures):
meta: Acquisition = future.result()
patients[meta.id].append(meta)
dataset = Dataset(patients={})
for id_, acquisitions in patients.items():
acquisitions.sort(key=lambda x: x.date)
fpath: Path = cfg.output / id_ / "patient.json"
patient: Patient = Patient(
acquisitions=acquisitions,
birth_date=acquisitions[0].birth_date,
id=id_,
name=acquisitions[0].name,
sex=acquisitions[0].sex,
)
fpath.write_text(patient.model_dump_json())
dataset.patients[id_] = patient
(cfg.output / "dataset.json").write_text(dataset.model_dump_json())
12 changes: 12 additions & 0 deletions exp/2024/09/12/organize-ct/src/read-dataset.py
@@ -0,0 +1,12 @@
from pathlib import Path

import mkit


class CLIConfig(mkit.cli.CLIBaseConfig):
dpath: Path


@mkit.cli.auto_run
def main(cfg: CLIConfig) -> None:
dataset = mkit.io.cfg.dpath
60 changes: 60 additions & 0 deletions exp/2024/09/12/registration/src/global.py
@@ -0,0 +1,60 @@
from pathlib import Path
from typing import TYPE_CHECKING

import numpy as np
import pyvista as pv

import mkit
import mkit.typing.numpy as nt
from mkit.typing import StrPath

if TYPE_CHECKING:
import open3d as o3d

from mkit.ops.registration._global._result import GlobalRegistrationResult


class Config(mkit.cli.BaseConfig):
template: Path


def read_obj(fpath: StrPath) -> pv.PolyData:
fpath: Path = Path(fpath)
group_dup: list[str] = []
for line in mkit.utils.strip_comments(fpath.read_text()):
if line.startswith("g"):
name: str
_, name = line.split()
group_dup.append(name)
group_uniq: list[str] = list(set(group_dup))
dup_to_uniq: dict[str, int] = {name: i for i, name in enumerate(group_uniq)}
mesh: pv.PolyData = pv.read(fpath)
group_id: nt.IN = mkit.math.numpy.cast(mesh.cell_data["GroupIds"], int)
group_id = np.asarray([dup_to_uniq[group_dup[i]] for i in group_id])
mesh.cell_data["GroupIds"] = group_id
mesh.field_data["GroupNames"] = group_uniq
return mesh


@mkit.cli.auto_run()
def main(cfg: Config) -> None:
if False:
demo = o3d.data.DemoICPPointClouds()
source: o3d.geometry.PointCloud = o3d.io.read_point_cloud(demo.paths[0])
source_pv: pv.PolyData = pv.wrap(np.asarray(source.points))
target: o3d.geometry.PointCloud = o3d.io.read_point_cloud(demo.paths[1])
target_pv: pv.PolyData = pv.wrap(np.asarray(target.points))
ic(0.05 / source_pv.length)
ic(0.05 / target_pv.length)
return
source: pv.PolyData = read_obj(cfg.template)
target: pv.PolyData = mkit.ext.sculptor.get_template_skull()
source.save("data/source.vtp")
target.save("data/target.vtp")
result: GlobalRegistrationResult = mkit.ops.registration.global_registration(
source, target
)
source.transform(result.transform, inplace=True)
ic(np.unique(source.cell_data["GroupIds"]))
ic(source.field_data["GroupNames"])
source.save("data/result.vtp")
30 changes: 30 additions & 0 deletions exp/2024/09/12/registration/src/rigid.py
@@ -0,0 +1,30 @@
from pathlib import Path
from typing import TYPE_CHECKING

import numpy as np

import mkit

if TYPE_CHECKING:
import pyvista as pv

from mkit.ops.registration.rigid._result import RigidRegistrationResult


class Config(mkit.cli.BaseConfig):
template: Path


@mkit.cli.auto_run()
def main(cfg: Config) -> None:
source: pv.PolyData = mkit.io.pyvista.read_poly_data(cfg.template)
target: pv.PolyData = mkit.ext.sculptor.get_template_skull()
source.save("data/source.vtp")
target.save("data/target.vtp")
result: RigidRegistrationResult = mkit.ops.registration.rigid_registration(
source, target
)
source.transform(result.transform, inplace=True)
ic(np.unique(source.cell_data["GroupIds"]))
ic(source.field_data["GroupNames"])
source.save("data/result.vtp")
11 changes: 6 additions & 5 deletions mkdocs.yaml
@@ -27,8 +27,8 @@ theme:
- navigation.instant.prefetch
- navigation.instant.progress
- navigation.path
- navigation.prune
# - navigation.sections
# - navigation.prune
- navigation.sections
- navigation.tabs
- navigation.tabs.sticky
- navigation.top
@@ -87,14 +87,15 @@ plugins:
- https://trimesh.org/objects.inv
options:
# General
show_inheritance_diagram: true
show_source: false
# Headings
show_symbol_type_heading: true
show_symbol_type_toc: true
# Members
inherited_members: true
filters:
- "!^_[^_]"
- "!__all__"
summary: true
# Docstrings
docstring_section_style: list
@@ -105,6 +106,6 @@
signature_crossrefs: true

watch:
- docs
- src/mkit
- docs/
- src/mkit/
- tools/gen-ref-pages.py
