diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b8c0068..6b84d91 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,7 +12,8 @@ jobs: build: needs: prepare-data env: - MRITK_TEST_DATA_FOLDER: ./test_data + MRITK_TEST_DATA_FOLDER_ROOT: ./test_data + MRITK_TEST_DATA_FOLDER: ./test_data/mritk-test-data name: Test on ${{ matrix.os }} with Python ${{ matrix.python-version }} runs-on: ${{ matrix.os }} @@ -31,7 +32,12 @@ jobs: uses: actions/download-artifact@v8 with: name: shared-test-data - path: ${{ env.MRITK_TEST_DATA_FOLDER }} # Where you want the data to appear in this runner + path: ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }} # Where you want the data to appear in this runner + + - name: List downloaded files + run: | + echo "Files in ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }}:" + ls -R ${{ env.MRITK_TEST_DATA_FOLDER_ROOT }} - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v6 diff --git a/.github/workflows/setup-data.yml b/.github/workflows/setup-data.yml index 98869b9..33dcf74 100644 --- a/.github/workflows/setup-data.yml +++ b/.github/workflows/setup-data.yml @@ -22,7 +22,7 @@ jobs: path: data/ # The folder you want to cache # The key determines if we have a match. # Change 'v1' to 'v2' manually to force a re-download in the future. - key: test-data-v2 + key: test-data-v4 # 2. DOWNLOAD ONLY IF CACHE MISS diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6f4c966..dd573cc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -90,7 +90,7 @@ We use **pytest** for testing. Before submitting a PR, ensure all tests pass loc Some tests require specific data. You can download this using the CLI included in the toolkit. 
```bash # Downloads data to the 'test_data' folder (or your preferred location) - python -m mritk download-test-data test_data + python -m mritk datasets download test-data -o test_data ``` *Note: You may need to set the `MRITK_TEST_DATA_FOLDER` environment variable if you download the data to a custom location.* diff --git a/pyproject.toml b/pyproject.toml index 1c57521..f8f354b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,9 @@ dependencies = [ "nibabel", "pandas", "scipy", -] + "scikit-image", + "pydicom", + "dcm2niix"] [project.optional-dependencies] show = [ diff --git a/src/mritk/concentration/__init__.py b/src/mritk/concentration/__init__.py new file mode 100644 index 0000000..9e93ffa --- /dev/null +++ b/src/mritk/concentration/__init__.py @@ -0,0 +1,8 @@ +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +from . import concentration + +__all__ = ["concentration"] diff --git a/src/mritk/concentration/concentration.py b/src/mritk/concentration/concentration.py new file mode 100644 index 0000000..08c0a67 --- /dev/null +++ b/src/mritk/concentration/concentration.py @@ -0,0 +1,54 @@ +"""Concentration maps module + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +from pathlib import Path +from typing import Optional + +import numpy as np +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data +from ..data.orientation import assert_same_space + + +def concentration_from_T1(T1: np.ndarray, T1_0: np.ndarray, r1: float) -> np.ndarray: + C = 1 / r1 * (1 / T1 - 1 / T1_0) + return C + + +def concentration_from_R1(R1: np.ndarray, R1_0: np.ndarray, r1: float) -> np.ndarray: + C = 1 / r1 * (R1 - R1_0) + return C + + +def concentration( + input: Path, + reference: Path, + output: 
Optional[Path] = None, + r1: float = 0.0045, + mask: Optional[Path] = None, +) -> MRIData: + T1_mri = load_mri_data(input, np.single) + T10_mri = load_mri_data(reference, np.single) + assert_same_space(T1_mri, T10_mri) + + if mask is not None: + mask_mri = load_mri_data(mask, bool) + assert_same_space(mask_mri, T10_mri) + mask_data = mask_mri.data * (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) + T1_mri.data *= mask_data + T10_mri.data *= mask_data + else: + mask_data = (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) + T1_mri.data[~mask_data] = np.nan + T10_mri.data[~mask_data] = np.nan + + concentrations = np.nan * np.zeros_like(T10_mri.data) + concentrations[mask_data] = concentration_from_T1(T1=T1_mri.data[mask_data], T1_0=T10_mri.data[mask_data], r1=r1) + mri_data = MRIData(data=concentrations, affine=T10_mri.affine) + if output is not None: + save_mri_data(mri_data, output, np.single) + return mri_data diff --git a/src/mritk/datasets.py b/src/mritk/datasets.py index e44834a..bf7a23e 100644 --- a/src/mritk/datasets.py +++ b/src/mritk/datasets.py @@ -15,6 +15,11 @@ logger = logging.getLogger(__name__) +def download_link_google_drive(file_id: str) -> str: + # https://gist.github.com/tanaikech/f0f2d122e05bf5f971611258c22c110f + return f"https://drive.usercontent.google.com/download?id={file_id}&confirm=xxx" + + @dataclass class Dataset: name: str @@ -29,12 +34,8 @@ def get_datasets() -> dict[str, Dataset]: "test-data": Dataset( name="Test Data", description="A small test dataset for testing functionality (based on the Gonzo dataset).", - doi="10.5281/zenodo.14266867", license="CC-BY-4.0", - links={ - "mri-processed.zip": "https://zenodo.org/records/14266867/files/mri-processed.zip?download=1", - "timetable.tsv": "https://github.com/jorgenriseth/gonzo/blob/main/mri_dataset/timetable.tsv?raw=true", - }, + links={"mritk-test-data.zip": download_link_google_drive("1CSj3CHd4ztcU4Aqdlw9K2OWjPi5u75bd")}, ), "gonzo": Dataset( name="The Gonzo Dataset", diff --git 
a/src/mritk/masking/__init__.py b/src/mritk/masking/__init__.py new file mode 100644 index 0000000..4bd6155 --- /dev/null +++ b/src/mritk/masking/__init__.py @@ -0,0 +1,8 @@ +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +from . import masks + +__all__ = ["masks", "utils"] diff --git a/src/mritk/masking/masks.py b/src/mritk/masking/masks.py new file mode 100644 index 0000000..1ead7ae --- /dev/null +++ b/src/mritk/masking/masks.py @@ -0,0 +1,72 @@ +"""Intracranial and CSF masks generation module + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +import skimage +from typing import Optional +from pathlib import Path + +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data +from ..data.orientation import assert_same_space +from .utils import largest_island + + +def create_csf_mask( + vol: np.ndarray, + connectivity: Optional[int] = 2, + use_li: bool = False, +) -> np.ndarray: + connectivity = connectivity or vol.ndim + if use_li: + thresh = skimage.filters.threshold_li(vol) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + else: + (hist, bins) = np.histogram(vol[(vol > 0) * (vol < np.quantile(vol, 0.999))], bins=512) + thresh = skimage.filters.threshold_yen(hist=(hist, bins)) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + return binary + + +def csf_mask( + input: Path, + connectivity: Optional[int] = 2, + use_li: bool = False, + output: Path | None = None, +) -> MRIData: + input_vol = load_mri_data(input, dtype=np.single) + mask = create_csf_mask(input_vol.data, connectivity, use_li) + assert np.max(mask) > 0, "Masking failed, no voxels in mask" + mri_data = MRIData(data=mask, affine=input_vol.affine) 
+ if output is not None: + save_mri_data(mri_data, output, dtype=np.uint8) + return mri_data + + +def create_intracranial_mask(csf_mask: MRIData, segmentation: MRIData) -> np.ndarray: + assert_same_space(csf_mask, segmentation) + combined_mask = csf_mask.data + segmentation.data.astype(bool) + background_mask = largest_island(~combined_mask, connectivity=1) + opened = skimage.morphology.binary_opening(background_mask, skimage.morphology.ball(3)) + return ~opened + # return MRIData(data=~opened, affine=segmentation.affine) + + +def intracranial_mask( + csf_mask: Path, + segmentation: Path, + output: Optional[Path] = None, +) -> MRIData: + input_csf_mask = load_mri_data(csf_mask, dtype=bool) + segmentation_data = load_mri_data(segmentation, dtype=bool) + mask_data = create_intracranial_mask(input_csf_mask, segmentation_data) + mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) + if output is not None: + save_mri_data(mri_data, output, dtype=np.uint8) + return mri_data diff --git a/src/mritk/masking/utils.py b/src/mritk/masking/utils.py new file mode 100644 index 0000000..2858a5c --- /dev/null +++ b/src/mritk/masking/utils.py @@ -0,0 +1,16 @@ +"""Masking utils + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +import skimage + + +def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray: + newmask = skimage.measure.label(mask, connectivity=connectivity) + regions = skimage.measure.regionprops(newmask) + regions.sort(key=lambda x: x.num_pixels, reverse=True) + return newmask == regions[0].label diff --git a/src/mritk/t1_maps/__init__.py b/src/mritk/t1_maps/__init__.py new file mode 100644 index 0000000..1b67101 --- /dev/null +++ b/src/mritk/t1_maps/__init__.py @@ -0,0 +1,7 @@ +""" +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) 
+Copyright (C) 2026 Simula Research Laboratory +""" + +__all__ = ["dicom_to_nifti", "t1_maps", "t1_to_r1", "utils"] diff --git a/src/mritk/t1_maps/dicom_to_nifti.py b/src/mritk/t1_maps/dicom_to_nifti.py new file mode 100644 index 0000000..3587c8f --- /dev/null +++ b/src/mritk/t1_maps/dicom_to_nifti.py @@ -0,0 +1,126 @@ +"""MRI DICOM to NIfTI conversion Module + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import shutil +import subprocess +import tempfile +from pathlib import Path +from typing import Optional +import nibabel +import json + +import numpy as np + +from ..data.io import load_mri_data, save_mri_data +from ..t1_maps.utils import VOLUME_LABELS, read_dicom_trigger_times +from .utils import extract_single_volume, logger + + +def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]): + import pydicom + + dcm = pydicom.dcmread(dcmpath) + frames_total = int(dcm.NumberOfFrames) + frames_per_volume = dcm[0x2001, 0x1018].value # [Number of Slices MR] + num_volumes = frames_total // frames_per_volume + assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not match" + + D = dcm.pixel_array.astype(np.single) + frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence + + vols_out = [] + for volname in subvolumes: + vol_idx = VOLUME_LABELS.index(volname) + + # Find volume slices representing current subvolume + subvol_idx_start = vol_idx * frames_per_volume + subvol_idx_end = (vol_idx + 1) * frames_per_volume + frame_fg = frame_fg_sequence[subvol_idx_start] + logger.info( + ( + f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: {volname} between indices" + + f"{subvol_idx_start, subvol_idx_end} / {frames_total}." 
+ ) + ) + mri = extract_single_volume(D[subvol_idx_start:subvol_idx_end], frame_fg) + + nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine) + nii_oriented.set_sform(nii_oriented.affine, "scanner") + nii_oriented.set_qform(nii_oriented.affine, "scanner") + + # Include meta-data + description = { + "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), + "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), + } + if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): + description["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] + if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): + description["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength + vols_out.append({"nifti": nii_oriented, "descrip": description}) + return vols_out + + +def dicom_to_looklocker(dicomfile: Path, outpath: Path): + outdir, form = outpath.parent, outpath.stem + outdir.mkdir(exist_ok=True, parents=True) + times = read_dicom_trigger_times(dicomfile) + np.savetxt(f"{outdir}/{form}" + "_trigger_times.txt", times) + + with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: + tmppath = Path(tmpdir) + cmd = f"dcm2niix -f {form} -z y --ignore_trigger_times -o '{tmppath}' '{dicomfile}' > /tmp/dcm2niix.txt" + subprocess.run(cmd, shell=True, check=True) + shutil.copy( + tmppath / f"{form}.json", + outpath.with_suffix(".json"), + ) + mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double) + save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) + + +def dicom_to_mixed( + dcmpath: Path, + outpath: Path, + subvolumes: Optional[list[str]] = None, +): + subvolumes = subvolumes or VOLUME_LABELS + assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( + f"Invalid subvolume name in {subvolumes}, not in {VOLUME_LABELS}" + ) + outdir, form = outpath.parent, outpath.stem + outdir.mkdir(exist_ok=True, parents=True) + + vols = 
extract_mixed_dicom(dcmpath, subvolumes) + meta = {} + for vol, volname in zip(vols, subvolumes): + output = outpath.with_name(outpath.stem + "_" + volname + ".nii.gz") + + nii = vol["nifti"] + descrip = vol["descrip"] + nibabel.nifti1.save(nii, output) + try: + if volname == "SE-modulus": + meta["TR_SE"] = descrip["TR"] + meta["TE"] = descrip["TE"] + meta["ETL"] = descrip["ETL"] + elif volname == "IR-corrected-real": + meta["TR_IR"] = descrip["TR"] + meta["TI"] = descrip["TI"] + except KeyError as e: + print(volname, descrip) + raise e + + with open(outpath.parent / f"{form}_meta.json", "w") as f: + json.dump(meta, f) + + try: + cmd = f"dcm2niix -w 0 --terse -b o -f '{form}' -o '{outdir}' '{dcmpath}' >> /tmp/dcm2niix.txt " + subprocess.run(cmd, shell=True).check_returncode() + except (ValueError, subprocess.CalledProcessError) as e: + print(str(e)) + pass diff --git a/src/mritk/t1_maps/t1_maps.py b/src/mritk/t1_maps/t1_maps.py new file mode 100644 index 0000000..a6e4184 --- /dev/null +++ b/src/mritk/t1_maps/t1_maps.py @@ -0,0 +1,178 @@ +"""T1 Maps generation module + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +import tqdm +from functools import partial +import skimage +from typing import Optional +from pathlib import Path +import nibabel +import json +import scipy + +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data +from ..masking.masks import create_csf_mask +from .utils import ( + mri_facemask, + fit_voxel, + nan_filter_gaussian, + T1_lookup_table, +) + + +def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: + LL_mri = load_mri_data(looklocker_input, dtype=np.single) + D = LL_mri.data + affine = LL_mri.affine + t_data = np.loadtxt(timestamps) / 1000 + + T1_ROOF = 10000 + assert len(D.shape) >= 4, f"data should be 4-dimensional, got 
data with shape {D.shape}" + mask = mri_facemask(D[..., 0]) + valid_voxels = (np.nanmax(D, axis=-1) > 0) * mask + + D_normalized = np.nan * np.zeros_like(D) + D_normalized[valid_voxels] = D[valid_voxels] / np.nanmax(D, axis=-1)[valid_voxels, np.newaxis] + voxel_mask = np.array(np.where(valid_voxels)).T + Dmasked = np.array([D_normalized[i, j, k] for (i, j, k) in voxel_mask]) + + with tqdm.tqdm(total=len(Dmasked)) as pbar: + voxel_fitter = partial(fit_voxel, t_data, pbar) + vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") + fitted_coefficients = vfunc(Dmasked) + + _, x2, x3 = ( + fitted_coefficients[:, 0], + fitted_coefficients[:, 1], + fitted_coefficients[:, 2], + ) + + I, J, K = voxel_mask.T + T1map = np.nan * np.zeros_like(D[..., 0]) + T1map[I, J, K] = (x2 / x3) ** 2 * 1000.0 # convert to ms + T1map = np.minimum(T1map, T1_ROOF) + T1map_mri = MRIData(T1map.astype(np.single), affine) + if output is not None: + save_mri_data(T1map_mri, output, dtype=np.single) + + return T1map_mri + + +def looklocker_t1map_postprocessing( + T1map: Path, + T1_low: float, + T1_high: float, + radius: int = 10, + erode_dilate_factor: float = 1.3, + mask: Optional[np.ndarray] = None, + output: Path | None = None, +) -> MRIData: + T1map_mri = load_mri_data(T1map, dtype=np.single) + T1map_data = T1map_mri.data.copy() + if mask is None: + # Create mask for largest island. + mask = skimage.measure.label(np.isfinite(T1map_data)) + regions = skimage.measure.regionprops(mask) + regions.sort(key=lambda x: x.num_pixels, reverse=True) + mask = mask == regions[0].label + skimage.morphology.remove_small_holes(mask, area_threshold=10 ** (mask.ndim), connectivity=2, out=mask) + skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) + skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) + + # Remove non-zero artifacts outside of the mask.
+ surface_vox = np.isfinite(T1map_data) * (~mask) + print(f"Removing {surface_vox.sum()} voxels outside of the head mask") + T1map_data[~mask] = np.nan + + # Remove outliers within the mask. + outliers = np.logical_or(T1map_data < T1_low, T1_high < T1map_data) + print("Removing", outliers.sum(), f"voxels outside the range ({T1_low}, {T1_high}).") + T1map_data[outliers] = np.nan + if np.isfinite(T1map_data).sum() / T1map_data.size < 0.01: + raise RuntimeError("After outlier removal, less than 1% of the image is left. Check image units.") + + # Fill internallly missing values + fill_mask = np.isnan(T1map_data) * mask + while fill_mask.sum() > 0: + print(f"Filling in {fill_mask.sum()} voxels within the mask.") + T1map_data[fill_mask] = nan_filter_gaussian(T1map_data, 1.0)[fill_mask] + fill_mask = np.isnan(T1map_data) * mask + + processed_T1map = MRIData(T1map_data, T1map_mri.affine) + if output is not None: + save_mri_data(processed_T1map, output, dtype=np.single) + + return processed_T1map + + +def mixed_t1map( + SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + SE = load_mri_data(SE_nii_path, dtype=np.single) + IR = load_mri_data(IR_nii_path, dtype=np.single) + with open(meta_path, "r") as f: + meta = json.load(f) + + nonzero_mask = SE.data != 0 + F_data = np.nan * np.zeros_like(IR.data) + F_data[nonzero_mask] = IR.data[nonzero_mask] / SE.data[nonzero_mask] + + TR_se, TI, TE, ETL = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] + F, T1_grid = T1_lookup_table(TR_se, TI, TE, ETL, T1_low, T1_high) + interpolator = scipy.interpolate.interp1d(F, T1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + T1_volume = interpolator(F_data).astype(np.single) + nii = nibabel.nifti1.Nifti1Image(T1_volume, IR.affine) + nii.set_sform(nii.affine, "scanner") + nii.set_qform(nii.affine, "scanner") + + if output is not None: + nibabel.nifti1.save(nii, output) + + return nii + 
+ +def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | None = None) -> nibabel.nifti1.Nifti1Image: + T1map_nii = nibabel.nifti1.load(T1_path) + + SE_mri = load_mri_data(SE_nii_path, np.single) + mask = create_csf_mask(SE_mri.data, use_li=True) + mask = skimage.morphology.erosion(mask) + + masked_T1map = T1map_nii.get_fdata(dtype=np.single) + masked_T1map[~mask] = np.nan + masked_T1map_nii = nibabel.nifti1.Nifti1Image(masked_T1map, T1map_nii.affine, T1map_nii.header) + + if output is not None: + nibabel.nifti1.save(masked_T1map_nii, output) + + return masked_T1map_nii + + +def hybrid_t1map( + LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + mixed_mri = nibabel.nifti1.load(mixed_path) + mixed = mixed_mri.get_fdata() + + ll_mri = nibabel.nifti1.load(LL_path) + ll = ll_mri.get_fdata() + csf_mask_mri = nibabel.nifti1.load(csf_mask_path) + csf_mask = csf_mask_mri.get_fdata().astype(bool) + if erode > 0: + csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) + + hybrid = ll + newmask = csf_mask * (ll > threshold) * (mixed > threshold) + hybrid[newmask] = mixed[newmask] + + hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) + if output is not None: + nibabel.nifti1.save(hybrid_nii, output) + + return hybrid_nii diff --git a/src/mritk/t1_maps/t1_to_r1.py b/src/mritk/t1_maps/t1_to_r1.py new file mode 100644 index 0000000..af4290d --- /dev/null +++ b/src/mritk/t1_maps/t1_to_r1.py @@ -0,0 +1,52 @@ +"""T1 to R1 module + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +from pathlib import Path +from typing import Union + +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data + + +def convert_T1_to_R1( + T1map_mri: 
MRIData, + scale: float = 1000, + t1_low: float = 1, + t1_high: float = float("inf"), +) -> MRIData: + valid_t1 = (t1_low <= T1map_mri.data) * (T1map_mri.data <= t1_high) + R1map = np.nan * np.zeros_like(T1map_mri.data) + R1map[valid_t1] = scale / np.minimum(t1_high, np.maximum(t1_low, T1map_mri.data[valid_t1])) + + R1map_mri = MRIData(data=R1map, affine=T1map_mri.affine) + return R1map_mri + + +def T1_to_R1( + input_mri: Union[Path, MRIData], + output: Path | None = None, + scale: float = 1000, + t1_low: float = 1, + t1_high: float = float("inf"), +) -> MRIData: + if isinstance(input_mri, Path): + T1map_mri = load_mri_data(input_mri, dtype=np.single) + elif isinstance(input_mri, MRIData): + T1map_mri = input_mri + else: + raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") + + valid_t1 = (t1_low <= T1map_mri.data) * (T1map_mri.data <= t1_high) + R1map = np.nan * np.zeros_like(T1map_mri.data) + R1map[valid_t1] = scale / np.minimum(t1_high, np.maximum(t1_low, T1map_mri.data[valid_t1])) + + R1map_mri = MRIData(data=R1map, affine=T1map_mri.affine) + + if output is not None: + save_mri_data(R1map_mri, output, dtype=np.single) + return R1map_mri diff --git a/src/mritk/t1_maps/utils.py b/src/mritk/t1_maps/utils.py new file mode 100644 index 0000000..d2a3258 --- /dev/null +++ b/src/mritk/t1_maps/utils.py @@ -0,0 +1,210 @@ +"""MRI DICOM to NIfTI conversion - utils + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +import logging +import scipy as sp +import skimage +import warnings +from scipy.optimize import OptimizeWarning +import os +import nibabel + +from ..data.orientation import data_reorientation, change_of_coordinates_map +from ..data.base import MRIData + +logger = logging.getLogger(__name__) + +VOLUME_LABELS = [ + "IR-modulus", + "IR-real", + "IR-corrected-real", + "SE-modulus", + "SE-real", + 
"T1map-scanner", +] + + +def read_dicom_trigger_times(dicomfile): + import pydicom + + dcm = pydicom.dcmread(dicomfile) + all_frame_times = [ + f.CardiacSynchronizationSequence[0].NominalCardiacTriggerDelayTime for f in dcm.PerFrameFunctionalGroupsSequence + ] + return np.unique(all_frame_times) + + +def dicom_standard_affine( + frame_fg, +) -> np.ndarray: + # Get the original data shape + df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices) + dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing) + plane_orientation = frame_fg.PlaneOrientationSequence[0] + orientation = np.array(plane_orientation.ImageOrientationPatient) + + # Find orientation of data array relative to LPS-coordinate system. + row_cosine = orientation[:3] + col_cosine = orientation[3:] + frame_cosine = np.cross(row_cosine, col_cosine) + + # Create DICOM-definition affine map to LPS. + T_1 = np.array(frame_fg.PlanePositionSequence[0].ImagePositionPatient) + + # Create DICOM-definition affine map to LPS. + M_dcm = np.zeros((4, 4)) + M_dcm[:3, 0] = row_cosine * dc + M_dcm[:3, 1] = col_cosine * dr + M_dcm[:3, 2] = frame_cosine * df + M_dcm[:3, 3] = T_1 + M_dcm[3, 3] = 1.0 + + # Reorder from "natural index order" to DICOM affine map definition order. + N_order = np.eye(4)[[2, 1, 0, 3]] + return M_dcm @ N_order + + +def extract_single_volume( + D: np.ndarray, + frame_fg, +) -> MRIData: + # Find scaling values (should potentially be inside scaling loop) + pixel_value_transform = frame_fg.PixelValueTransformationSequence[0] + slope = float(pixel_value_transform.RescaleSlope) + intercept = float(pixel_value_transform.RescaleIntercept) + private = frame_fg[0x2005, 0x140F][0] + scale_slope = private[0x2005, 0x100E].value + + # Loop over and scale values. 
+ volume = np.zeros_like(D, dtype=np.single) + for idx in range(D.shape[0]): + volume[idx] = (intercept + slope * D[idx]) / (scale_slope * slope) + + A_dcm = dicom_standard_affine(frame_fg) + C = change_of_coordinates_map("LPS", "RAS") + mri = data_reorientation(MRIData(volume, C @ A_dcm)) + + return mri + + +def mri_facemask(vol: np.ndarray, smoothing_level=5): + thresh = skimage.filters.threshold_triangle(vol) + binary = vol > thresh + binary = sp.ndimage.binary_fill_holes(binary) + binary = skimage.filters.gaussian(binary, sigma=smoothing_level) + binary = binary > skimage.filters.threshold_isodata(binary) + return binary + + +def voxel_fit_function(t, x1, x2, x3): + return np.abs(x1 * (1.0 - (1 + x2**2) * np.exp(-(x3**2) * t))) + + +@np.errstate(divide="raise", invalid="raise", over="raise") +def curve_fit_wrapper(f, t, y, p0): + """Raises error instead of catching numpy warnings, such that + these cases may be treated.""" + with warnings.catch_warnings(): + warnings.simplefilter("error", OptimizeWarning) + popt, _ = sp.optimize.curve_fit(f, xdata=t, ydata=y, p0=p0, maxfev=1000) + return popt + + +def fit_voxel(time_s: np.ndarray, pbar, m: np.ndarray) -> np.ndarray: + if pbar is not None: + pbar.update(1) + x1 = 1.0 + x2 = np.sqrt(1.25) + T1 = time_s[np.argmin(m)] / np.log(1 + x2**2) + x3 = np.sqrt(1 / T1) + p0 = np.array((x1, x2, x3)) + if not np.all(np.isfinite(m)): + return np.nan * np.zeros_like(p0) + try: + popt = curve_fit_wrapper(voxel_fit_function, time_s, m, p0) + except (OptimizeWarning, FloatingPointError): + return np.nan * np.zeros_like(p0) + except RuntimeError as e: + if "maxfev" in str(e): + return np.nan * np.zeros_like(p0) + raise e + return popt + + +def nan_filter_gaussian(U: np.ndarray, sigma: float, truncate: float = 4.0) -> np.ndarray: + V = U.copy() + V[np.isnan(U)] = 0 + VV = sp.ndimage.gaussian_filter(V, sigma=sigma, truncate=truncate) + + W = np.ones_like(U) + W[np.isnan(U)] = 0 + WW = sp.ndimage.gaussian_filter(W, sigma=sigma, 
truncate=truncate) + mask = ~((WW == 0) * (VV == 0)) + out = np.nan * np.zeros_like(U) + out[mask] = VV[mask] / WW[mask] + return out + + +def estimate_se_free_relaxation_time(TRse, TE, ETL): + """Compute free relaxation time following spin echo image from effective echo + time TE and echo train length ETL, corrected for 20 dummy echoes.""" + return TRse - TE * (1 + 0.5 * (ETL - 1) / (0.5 * (ETL + 1) + 20)) + + +def T1_lookup_table(TRse: float, TI: float, TE: float, ETL: int, T1_low: float, T1_hi: float) -> tuple[np.ndarray, np.ndarray]: + TRfree = estimate_se_free_relaxation_time(TRse, TE, ETL) + T1_grid = np.arange(int(T1_low), int(T1_hi + 1)) + Sse = 1 - np.exp(-TRfree / T1_grid) + Sir = 1 - (1 + Sse) * np.exp(-TI / T1_grid) + fractionCurve = Sir / Sse + return fractionCurve, T1_grid + + +def compare_nifti_images(img_path1, img_path2, data_tolerance=0.0): + """ + Compares two NIfTI images for equality of data, affine, and header. + + Args: + img_path1 (str): Path to the first NIfTI file. + img_path2 (str): Path to the second NIfTI file. + data_tolerance (float): Tolerance for data comparison (use 0.0 for exact equality). + + Returns: + bool: True if images are considered the same; raises on missing files or data mismatch. + """ + if not os.path.exists(img_path1): + raise FileNotFoundError(f"File not found: {img_path1}") + if not os.path.exists(img_path2): + raise FileNotFoundError(f"File not found: {img_path2}") + + img1 = nibabel.load(img_path1) + img2 = nibabel.load(img_path2) + + # 1. Compare Image Data + data1 = img1.get_fdata() + data2 = img2.get_fdata() + + # Convert NaN to zero (can have NaNs in concentration maps) + data1 = np.nan_to_num(data1, nan=0.0) + data2 = np.nan_to_num(data2, nan=0.0) + + print("data 1= ", np.unique(data1)) + print("data 2= ", np.unique(data2)) + + # Use np.allclose for data comparison with tolerance, which is often needed + # for floating-point data, or np.array_equal for exact comparison.
+ if data_tolerance > 0: + data_equal = np.allclose(data1, data2, atol=data_tolerance) + else: + data_equal = np.array_equal(data1, data2) + + deviation = np.mean(np.abs(data1 - data2)) + assert data_equal, f"Data mismatch (mean absolute deviation: {deviation:.4f})" + + # Overall result + return data_equal diff --git a/test/test_concentration.py b/test/test_concentration.py new file mode 100644 index 0000000..73936d8 --- /dev/null +++ b/test/test_concentration.py @@ -0,0 +1,33 @@ +"""MRI Concentration maps - Tests + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +from pathlib import Path + +from mritk.concentration.concentration import concentration + +from mritk.t1_maps.utils import compare_nifti_images + + +def test_intracranial_concentration(tmp_path, mri_data_dir: Path): + baseline_path = mri_data_dir / "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz" + sessions = [1, 2] + + images_path = [ + mri_data_dir / f"mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-0{i}_T1map_hybrid.nii.gz" for i in sessions + ] + mask_path = mri_data_dir / "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz" + r1 = 0.0032 + + ref_outputs = [ + mri_data_dir / f"mri-processed/mri_processed_data/sub-01/concentrations/sub-01_ses-0{i}_concentration.nii.gz" + for i in sessions + ] + test_outputs = [tmp_path / f"output_ses-0{i}_concentration.nii.gz" for i in sessions] + + for i, s in enumerate(sessions): + concentration(input=images_path[i], reference=baseline_path, output=test_outputs[i], r1=r1, mask=mask_path) + compare_nifti_images(test_outputs[i], ref_outputs[i], data_tolerance=1e-12) diff --git a/test/test_datasets.py b/test/test_datasets.py index 16bcbe3..e4493c7 100644 --- a/test/test_datasets.py +++ b/test/test_datasets.py @@ -31,9 +31,9 @@ def mock_datasets(): def 
test_get_datasets_structure(): """Ensure get_datasets returns a dict of Dataset objects.""" datasets = mritk.datasets.get_datasets() - assert "test-data" in datasets - assert isinstance(datasets["test-data"], Dataset) - assert datasets["test-data"].doi == "10.5281/zenodo.14266867" + assert "gonzo" in datasets + assert isinstance(datasets["gonzo"], Dataset) + assert datasets["gonzo"].doi == "10.5281/zenodo.14266867" def test_add_arguments(): diff --git a/test/test_mri_t1_maps.py b/test/test_mri_t1_maps.py new file mode 100644 index 0000000..d40f934 --- /dev/null +++ b/test/test_mri_t1_maps.py @@ -0,0 +1,76 @@ +"""MRI T1 maps - Tests + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +from pathlib import Path +import pytest + +from mritk.t1_maps.t1_maps import ( + looklocker_t1map, + looklocker_t1map_postprocessing, + mixed_t1map, + mixed_t1map_postprocessing, + hybrid_t1map, +) + +from mritk.t1_maps.utils import compare_nifti_images + + +@pytest.mark.skip(reason="Takes too long") +def test_looklocker_t1map(tmp_path, mri_data_dir: Path): + LL_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1.nii.gz" + timestamps = ( + mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1_trigger_times.txt" + ) + T1_low = 100 + T1_high = 6000 + + ref_output = mri_data_dir / "mri-dataset/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-looklocker_T1map.nii.gz" + test_output_raw = tmp_path / "output_acq-looklocker_T1map_raw.nii.gz" + test_output = tmp_path / "output_acq-looklocker_T1map.nii.gz" + + looklocker_t1map(looklocker_input=LL_path, timestamps=timestamps, output=test_output_raw) + looklocker_t1map_postprocessing(T1map=test_output_raw, T1_low=T1_low, T1_high=T1_high, output=test_output) + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) 
+ + +def test_mixed_t1map(tmp_path, mri_data_dir: Path): + SE_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz" + IR_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz" + meta_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json" + + ref_output = mri_data_dir / "mri-dataset/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz" + test_output_raw = tmp_path / "output_acq-mixed_T1map_raw.nii.gz" + test_output = tmp_path / "output_acq-mixed_T1map.nii.gz" + + T1_low = 100 + T1_high = 10000 + + mixed_t1map( + SE_nii_path=SE_path, IR_nii_path=IR_path, meta_path=meta_path, T1_low=T1_low, T1_high=T1_high, output=test_output_raw + ) + mixed_t1map_postprocessing(SE_nii_path=SE_path, T1_path=test_output_raw, output=test_output) + + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) + + +def test_hybrid_t1map(tmp_path, mri_data_dir: Path): + LL_path = ( + mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-looklocker_T1map_registered.nii.gz" + ) + mixed_path = ( + mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-mixed_T1map_registered.nii.gz" + ) + csf_mask_path = mri_data_dir / "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz" + test_output = tmp_path / "output_T1map_hybrid.nii.gz" + ref_output = mri_data_dir / "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz" + threshold = 1500 + erode = 1 + + hybrid_t1map( + LL_path=LL_path, mixed_path=mixed_path, csf_mask_path=csf_mask_path, threshold=threshold, erode=erode, output=test_output + ) + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12)