Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
d3e6603
Start adding functions for generating T1 maps and computing concentra…
cdaversin Feb 26, 2026
5ff58ae
Merge remote-tracking branch 'origin/main' into cecile/concentration_…
finsberg Mar 3, 2026
1fedb19
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 3, 2026
91e1e9a
Fix module import + start updating functions in t1 map
cdaversin Mar 5, 2026
2acf187
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 5, 2026
056ddee
Fix downloading test data command
cdaversin Mar 5, 2026
9236008
Add mri-dataset (from Gonzo) to the test-data dataset
cdaversin Mar 5, 2026
f12d58f
Updates in T1maps functions and add test for T1maps (mixed, LL, hybrid)
cdaversin Mar 6, 2026
73415d6
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 6, 2026
6c978b5
Fix compare_nifti_images
cdaversin Mar 6, 2026
61b034d
Merge - Fix compare_nifti_images
cdaversin Mar 6, 2026
3c945a8
Replace binary_erosion (deprecated) by erosion
cdaversin Mar 6, 2026
f2d7e9c
Skipping looklocker T1map test (too long) - Mixed and Hybrid T1maps t…
cdaversin Mar 6, 2026
e3dcd0f
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 6, 2026
b833bff
Add test for concentration map - passing
cdaversin Mar 6, 2026
eefc66a
Merge branch 'cecile/concentration_maps' of https://github.com/scient…
cdaversin Mar 6, 2026
07291a5
Minor - Change tolerance for test_concentration to 1e-12
cdaversin Mar 6, 2026
b8254e7
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 6, 2026
fab2c2d
Add new test data
finsberg Mar 6, 2026
6eaeb73
Add new test data
finsberg Mar 6, 2026
ac0c029
Fix typing issues
finsberg Mar 6, 2026
eadf0b6
Merge branch 'finsberg/concentration-maps' into cecile/concentration_…
finsberg Mar 6, 2026
f9e900c
Update test for datasets
finsberg Mar 6, 2026
6f15108
List files in test data folder
finsberg Mar 6, 2026
6209bf2
Fix path to test data
finsberg Mar 6, 2026
1c1e103
Add new link to data
finsberg Mar 6, 2026
3053e9c
Try different test folder
finsberg Mar 6, 2026
d559c8c
Separate between test folder and root test folder
finsberg Mar 6, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ jobs:
build:
needs: prepare-data
env:
MRITK_TEST_DATA_FOLDER: ./test_data
MRITK_TEST_DATA_FOLDER_ROOT: ./test_data
MRITK_TEST_DATA_FOLDER: ./test_data/mritk-test-data

name: Test on ${{ matrix.os }} with Python ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}
Expand All @@ -31,7 +32,12 @@ jobs:
uses: actions/download-artifact@v8
with:
name: shared-test-data
path: ${{ env.MRITK_TEST_DATA_FOLDER }} # Where you want the data to appear in this runner
path: ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }} # Where you want the data to appear in this runner

- name: List downloaded files
run: |
echo "Files in ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }}:"
ls -R ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }}

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v6
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/setup-data.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
path: data/ # The folder you want to cache
# The key determines if we have a match.
# Change 'v1' to 'v2' manually to force a re-download in the future.
key: test-data-v2
key: test-data-v4


# 2. DOWNLOAD ONLY IF CACHE MISS
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ We use **pytest** for testing. Before submitting a PR, ensure all tests pass loc
Some tests require specific data. You can download this using the CLI included in the toolkit.
```bash
# Downloads data to the 'test_data' folder (or your preferred location)
python -m mritk download-test-data test_data
python -m mritk datasets download test-data -o test_data
```
*Note: You may need to set the `MRITK_TEST_DATA_FOLDER` environment variable if you download the data to a custom location.*

Expand Down
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ dependencies = [
"nibabel",
"pandas",
"scipy",
]
"scikit-image",
"pydicom",
"dcm2niix"]

[project.optional-dependencies]
show = [
Expand Down
8 changes: 8 additions & 0 deletions src/mritk/concentration/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
# Copyright (C) 2026 Simula Research Laboratory


from . import concentration

__all__ = ["concentration"]
54 changes: 54 additions & 0 deletions src/mritk/concentration/concentration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
"""Concentration maps module

Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
Copyright (C) 2026 Simula Research Laboratory
"""

from pathlib import Path
from typing import Optional

import numpy as np
from ..data.base import MRIData
from ..data.io import load_mri_data, save_mri_data
from ..data.orientation import assert_same_space


def concentration_from_T1(T1: np.ndarray, T1_0: np.ndarray, r1: float) -> np.ndarray:
    """Compute tracer concentration from post- and pre-contrast T1 maps.

    Applies the linear relaxivity relation C = (1/T1 - 1/T1_0) / r1,
    element-wise over the input arrays.
    """
    return (1.0 / T1 - 1.0 / T1_0) / r1


def concentration_from_R1(R1: np.ndarray, R1_0: np.ndarray, r1: float) -> np.ndarray:
    """Compute tracer concentration from post- and pre-contrast R1 (=1/T1) maps.

    Applies the linear relaxivity relation C = (R1 - R1_0) / r1,
    element-wise over the input arrays.
    """
    return (R1 - R1_0) / r1


def concentration(
    input: Path,
    reference: Path,
    output: Optional[Path] = None,
    r1: float = 0.0045,
    mask: Optional[Path] = None,
) -> MRIData:
    """Compute a contrast-agent concentration map from two T1 maps on disk.

    Parameters
    ----------
    input : Path to the post-contrast T1 map (NIfTI).
    reference : Path to the pre-contrast (baseline) T1 map; must share the
        same voxel space as ``input``.
    output : Optional path; when given, the result is saved there as
        single-precision NIfTI.
    r1 : Contrast-agent relaxivity (default 0.0045 — presumably
        L/mmol/ms for gadobutrol; verify against the acquisition protocol).
    mask : Optional path to a boolean mask restricting the computation.

    Returns
    -------
    MRIData with the concentration map; voxels outside the valid region
    are NaN.
    """
    T1_mri = load_mri_data(input, np.single)
    T10_mri = load_mri_data(reference, np.single)
    assert_same_space(T1_mri, T10_mri)

    # Only voxels with strictly positive T1 in both maps are valid,
    # since 1/T1 is taken below.
    valid = (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10)
    if mask is not None:
        mask_mri = load_mri_data(mask, bool)
        assert_same_space(mask_mri, T10_mri)
        mask_data = mask_mri.data * valid
        T1_mri.data *= mask_data
        T10_mri.data *= mask_data
    else:
        mask_data = valid
        T1_mri.data[~mask_data] = np.nan
        T10_mri.data[~mask_data] = np.nan

    # NaN outside the mask so downstream tools can distinguish
    # "no data" from a genuine zero concentration.
    concentrations = np.full_like(T10_mri.data, np.nan)
    concentrations[mask_data] = concentration_from_T1(T1=T1_mri.data[mask_data], T1_0=T10_mri.data[mask_data], r1=r1)
    mri_data = MRIData(data=concentrations, affine=T10_mri.affine)
    if output is not None:
        save_mri_data(mri_data, output, np.single)
    return mri_data
11 changes: 6 additions & 5 deletions src/mritk/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,11 @@
logger = logging.getLogger(__name__)


def download_link_google_drive(file_id: str) -> str:
    """Build a direct-download URL for a publicly shared Google Drive file.

    See https://gist.github.com/tanaikech/f0f2d122e05bf5f971611258c22c110f
    for background on the ``confirm`` query parameter.
    """
    base = "https://drive.usercontent.google.com/download"
    return f"{base}?id={file_id}&confirm=xxx"


@dataclass
class Dataset:
name: str
Expand All @@ -29,12 +34,8 @@ def get_datasets() -> dict[str, Dataset]:
"test-data": Dataset(
name="Test Data",
description="A small test dataset for testing functionality (based on the Gonzo dataset).",
doi="10.5281/zenodo.14266867",
license="CC-BY-4.0",
links={
"mri-processed.zip": "https://zenodo.org/records/14266867/files/mri-processed.zip?download=1",
"timetable.tsv": "https://github.com/jorgenriseth/gonzo/blob/main/mri_dataset/timetable.tsv?raw=true",
},
links={"mritk-test-data.zip": download_link_google_drive("1CSj3CHd4ztcU4Aqdlw9K2OWjPi5u75bd")},
),
"gonzo": Dataset(
name="The Gonzo Dataset",
Expand Down
8 changes: 8 additions & 0 deletions src/mritk/masking/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
# Copyright (C) 2026 Simula Research Laboratory


# Import both submodules so that every name listed in __all__ is an
# actual attribute of the package (previously "utils" was listed but
# never imported, breaking `from mritk.masking import *`).
from . import masks, utils

__all__ = ["masks", "utils"]
72 changes: 72 additions & 0 deletions src/mritk/masking/masks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
"""Intracranial and CSF masks generation module

Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
Copyright (C) 2026 Simula Research Laboratory
"""

import numpy as np
import skimage
from typing import Optional
from pathlib import Path

from ..data.base import MRIData
from ..data.io import load_mri_data, save_mri_data
from ..data.orientation import assert_same_space
from .utils import largest_island


def create_csf_mask(
    vol: np.ndarray,
    connectivity: Optional[int] = 2,
    use_li: bool = False,
) -> np.ndarray:
    """Threshold a volume and keep its largest connected component.

    With ``use_li`` the threshold is computed by Li's method on the raw
    volume; otherwise Yen's method is applied to a 512-bin histogram of
    the strictly positive voxels below the 99.9th percentile.
    """
    conn = connectivity or vol.ndim
    if use_li:
        thresh = skimage.filters.threshold_li(vol)
    else:
        upper = np.quantile(vol, 0.999)
        counts, edges = np.histogram(vol[(vol > 0) * (vol < upper)], bins=512)
        thresh = skimage.filters.threshold_yen(hist=(counts, edges))
    return largest_island(vol > thresh, connectivity=conn)


def csf_mask(
    input: Path,
    connectivity: Optional[int] = 2,
    use_li: bool = False,
    output: Path | None = None,
) -> MRIData:
    """Create a CSF mask from an MRI volume on disk.

    Loads the volume as single precision, thresholds it via
    ``create_csf_mask``, and optionally saves the mask as uint8 NIfTI.
    Raises AssertionError when the resulting mask contains no voxels.
    """
    source = load_mri_data(input, dtype=np.single)
    binary = create_csf_mask(source.data, connectivity, use_li)
    assert np.max(binary) > 0, "Masking failed, no voxels in mask"
    result = MRIData(data=binary, affine=source.affine)
    if output is not None:
        save_mri_data(result, output, dtype=np.uint8)
    return result


def create_intracranial_mask(csf_mask: MRIData, segmentation: MRIData) -> np.ndarray:
    """Build an intracranial mask as the complement of the background.

    The background is the largest connected region outside the union of
    CSF and segmented tissue, smoothed by a morphological opening with a
    radius-3 ball; everything else is considered intracranial.
    """
    assert_same_space(csf_mask, segmentation)
    union = csf_mask.data + segmentation.data.astype(bool)
    background = largest_island(~union, connectivity=1)
    smoothed_background = skimage.morphology.binary_opening(background, skimage.morphology.ball(3))
    return ~smoothed_background


def intracranial_mask(
    csf_mask: Path,
    segmentation: Path,
    output: Optional[Path] = None,
) -> MRIData:
    """Compute an intracranial mask from CSF-mask and segmentation files.

    Both inputs are loaded as boolean volumes; the result carries the
    segmentation's affine and is optionally saved as uint8 NIfTI.
    """
    csf = load_mri_data(csf_mask, dtype=bool)
    seg = load_mri_data(segmentation, dtype=bool)
    combined = create_intracranial_mask(csf, seg)
    result = MRIData(data=combined, affine=seg.affine)
    if output is not None:
        save_mri_data(result, output, dtype=np.uint8)
    return result
16 changes: 16 additions & 0 deletions src/mritk/masking/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
"""Masking utils

Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
Copyright (C) 2026 Simula Research Laboratory
"""

import numpy as np
import skimage


def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray:
    """Return a boolean mask of the largest connected component of *mask*.

    Raises IndexError if *mask* has no foreground voxels (no regions found).
    """
    labeled = skimage.measure.label(mask, connectivity=connectivity)
    regions = skimage.measure.regionprops(labeled)
    regions.sort(key=lambda region: region.num_pixels, reverse=True)
    return labeled == regions[0].label
7 changes: 7 additions & 0 deletions src/mritk/t1_maps/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
"""
Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
Copyright (C) 2026 Simula Research Laboratory
"""

__all__ = ["dicom_to_nifti", "t1_maps", "t1_to_r1", "utils"]
126 changes: 126 additions & 0 deletions src/mritk/t1_maps/dicom_to_nifti.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
"""MRI DICOM to NIfTI conversion Module

Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com)
Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no)
Copyright (C) 2026 Simula Research Laboratory
"""

import shutil
import subprocess
import tempfile
from pathlib import Path
from typing import Optional
import nibabel
import json

import numpy as np

from ..data.io import load_mri_data, save_mri_data
from ..t1_maps.utils import VOLUME_LABELS, read_dicom_trigger_times
from .utils import extract_single_volume, logger


def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]):
    """Extract the requested subvolumes from a multi-frame "mixed" DICOM file.

    Parameters
    ----------
    dcmpath : Path to the multi-frame DICOM file.
    subvolumes : Names of the subvolumes to extract; each must be an entry
        of ``VOLUME_LABELS`` (its index determines the frame range).

    Returns
    -------
    A list of dicts, one per requested subvolume, each with keys
    "nifti" (an oriented nibabel Nifti1Image) and "descrip" (acquisition
    meta-data: TR, TE and, when present in the DICOM, TI and ETL).
    """
    import pydicom

    dcm = pydicom.dcmread(dcmpath)
    frames_total = int(dcm.NumberOfFrames)
    # Private tag (2001,1018) — presumably Philips "Number of Slices MR",
    # i.e. frames per volume; TODO confirm against the scanner vendor.
    frames_per_volume = dcm[0x2001, 0x1018].value  # [Number of Slices MR]
    num_volumes = frames_total // frames_per_volume
    assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not match"

    D = dcm.pixel_array.astype(np.single)
    frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence

    vols_out = []
    for volname in subvolumes:
        vol_idx = VOLUME_LABELS.index(volname)

        # Frames are stored volume-by-volume: locate this subvolume's range.
        subvol_idx_start = vol_idx * frames_per_volume
        subvol_idx_end = (vol_idx + 1) * frames_per_volume
        frame_fg = frame_fg_sequence[subvol_idx_start]
        logger.info(
            (
                # Trailing space added: previously "indices" fused with the tuple.
                f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: {volname} between indices "
                + f"{subvol_idx_start, subvol_idx_end} / {frames_total}."
            )
        )
        mri = extract_single_volume(D[subvol_idx_start:subvol_idx_end], frame_fg)

        nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine)
        nii_oriented.set_sform(nii_oriented.affine, "scanner")
        nii_oriented.set_qform(nii_oriented.affine, "scanner")

        # Include acquisition meta-data alongside the image.
        description = {
            "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime),
            "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime),
        }
        if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"):
            description["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0]
        if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"):
            description["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength
        vols_out.append({"nifti": nii_oriented, "descrip": description})
    return vols_out


def dicom_to_looklocker(dicomfile: Path, outpath: Path):
    """Convert a Look-Locker DICOM series to NIfTI and export its trigger times.

    Writes ``<stem>_trigger_times.txt``, the dcm2niix JSON sidecar
    (``<stem>.json``) and the image (``<stem>.nii.gz``) next to ``outpath``.
    dcm2niix output is logged to /tmp/dcm2niix.txt.

    Raises subprocess.CalledProcessError if dcm2niix fails.
    """
    outdir, form = outpath.parent, outpath.stem
    outdir.mkdir(exist_ok=True, parents=True)
    times = read_dicom_trigger_times(dicomfile)
    np.savetxt(outdir / f"{form}_trigger_times.txt", times)

    with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir:
        tmppath = Path(tmpdir)
        # List-form invocation (no shell): robust against spaces or shell
        # metacharacters in paths and in the output-name stem.
        cmd = [
            "dcm2niix",
            "-f", form,
            "-z", "y",
            "--ignore_trigger_times",
            "-o", str(tmppath),
            str(dicomfile),
        ]
        with open("/tmp/dcm2niix.txt", "w") as log:
            subprocess.run(cmd, check=True, stdout=log)
        shutil.copy(
            tmppath / f"{form}.json",
            outpath.with_suffix(".json"),
        )
        mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double)
        save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001)


def dicom_to_mixed(
    dcmpath: Path,
    outpath: Path,
    subvolumes: Optional[list[str]] = None,
):
    """Convert a "mixed" multi-frame DICOM into one NIfTI file per subvolume.

    Writes ``<stem>_<volname>.nii.gz`` for each requested subvolume and a
    ``<stem>_meta.json`` with the timing parameters needed for T1 fitting.
    Finally runs dcm2niix as a best-effort step to export the BIDS sidecar;
    its failure is printed but does not abort the conversion.

    Raises KeyError (after printing context) if an expected timing
    parameter is missing from the DICOM meta-data.
    """
    subvolumes = subvolumes or VOLUME_LABELS
    assert all([volname in VOLUME_LABELS for volname in subvolumes]), (
        f"Invalid subvolume name in {subvolumes}, not in {VOLUME_LABELS}"
    )
    outdir, form = outpath.parent, outpath.stem
    outdir.mkdir(exist_ok=True, parents=True)

    vols = extract_mixed_dicom(dcmpath, subvolumes)
    meta = {}
    for vol, volname in zip(vols, subvolumes):
        output = outpath.with_name(outpath.stem + "_" + volname + ".nii.gz")

        nii = vol["nifti"]
        descrip = vol["descrip"]
        nibabel.nifti1.save(nii, output)
        try:
            if volname == "SE-modulus":
                meta["TR_SE"] = descrip["TR"]
                meta["TE"] = descrip["TE"]
                meta["ETL"] = descrip["ETL"]
            elif volname == "IR-corrected-real":
                meta["TR_IR"] = descrip["TR"]
                meta["TI"] = descrip["TI"]
        except KeyError:
            print(volname, descrip)
            raise  # bare re-raise preserves the original traceback

    with open(outpath.parent / f"{form}_meta.json", "w") as f:
        json.dump(meta, f)

    # Best-effort sidecar export; list-form invocation (no shell) is robust
    # against spaces or shell metacharacters in paths. Output is appended to
    # the same log file used elsewhere in this module.
    try:
        cmd = ["dcm2niix", "-w", "0", "--terse", "-b", "o", "-f", form, "-o", str(outdir), str(dcmpath)]
        with open("/tmp/dcm2niix.txt", "a") as log:
            subprocess.run(cmd, stdout=log).check_returncode()
    except (ValueError, subprocess.CalledProcessError) as e:
        print(str(e))
Loading
Loading