[DAR-4924][External] Resolving issues with import & export of NifTI annotations #979

Merged · 4 commits · Dec 17, 2024
29 changes: 22 additions & 7 deletions darwin/dataset/remote_dataset_v2.py
@@ -11,7 +11,7 @@
Tuple,
Union,
)

import numpy as np
from pydantic import ValidationError
from requests.models import Response

@@ -873,10 +873,15 @@ def register_multi_slotted(
print(f"Reistration complete. Check your items in the dataset: {self.slug}")
return results

def _get_remote_files_that_require_legacy_scaling(self) -> List[Path]:
def _get_remote_files_that_require_legacy_scaling(
self,
) -> Dict[str, Dict[str, Any]]:
"""
Get all remote files that have been scaled upon upload. These files require that
NifTI annotations are similarly scaled during import
NifTI annotations are similarly scaled during import.

The in-platform affines are returned for each legacy file, as this is required
to properly re-orient the annotations during import.

Parameters
----------
@@ -885,21 +890,31 @@ def _get_remote_files_that_require_legacy_scaling(self) -> List[Path]:

Returns
-------
List[Path]
A list of full remote paths of dataset items that require NifTI annotations to be scaled
Dict[str, Dict[str, Any]]
A dictionary of remote file full paths to their slot affine maps
"""
remote_files_that_require_legacy_scaling = []
remote_files_that_require_legacy_scaling = {}
remote_files = self.fetch_remote_files(
filters={"statuses": ["new", "annotate", "review", "complete", "archived"]}
)
for remote_file in remote_files:
if not remote_file.slots[0].get("metadata", {}).get("medical", {}):
continue
if not (
remote_file.slots[0]
.get("metadata", {})
.get("medical", {})
.get("handler")
):
remote_files_that_require_legacy_scaling.append(remote_file.full_path)
slot_affine_map = {}
for slot in remote_file.slots:
slot_affine_map[slot["slot_name"]] = np.array(
slot["metadata"]["medical"]["affine"],
dtype=np.float64,
)
remote_files_that_require_legacy_scaling[
Path(remote_file.full_path)
] = slot_affine_map

return remote_files_that_require_legacy_scaling
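
For reference, a minimal sketch (illustrative path and affine values, not from a real dataset) of the mapping this helper returns and of how the import side consumes it:

```python
from pathlib import Path

import numpy as np

# Shape of the returned mapping: full remote path -> {slot name -> 4x4 affine}.
remote_files_that_require_legacy_scaling = {
    Path("/path/to/file/filename"): {
        "0": np.array(
            [[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]],
            dtype=np.float64,
        ),
    },
}

# Import-side lookup: a file is treated as legacy iff its full remote path is a key.
remote_file_path = Path("/path/to/file/filename")
if remote_file_path in remote_files_that_require_legacy_scaling:
    slot_affine_map = remote_files_that_require_legacy_scaling[remote_file_path]
    affine = slot_affine_map[next(iter(slot_affine_map))]  # take the 1st slot
```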

43 changes: 31 additions & 12 deletions darwin/exporter/formats/nifti.py
@@ -25,7 +25,6 @@ def _console_theme() -> Theme:
console = Console(theme=_console_theme())
try:
import nibabel as nib
from nibabel.orientations import io_orientation, ornt_transform
except ImportError:
import_fail_string = r"""
You must install darwin-py with pip install darwin-py\[medical]
@@ -128,7 +127,11 @@ def export(
polygon_annotations, slot_map, output_volumes, legacy=legacy
)
write_output_volume_to_disk(
output_volumes, image_id=image_id, output_dir=output_dir, legacy=legacy
output_volumes,
image_id=image_id,
output_dir=output_dir,
legacy=legacy,
filename=video_annotation.filename,
)
# Need to map raster layers to SeriesInstanceUIDs
if mask_present:
@@ -456,6 +459,7 @@ def write_output_volume_to_disk(
image_id: str,
output_dir: Union[str, Path],
legacy: bool = False,
filename: Optional[str] = None,
) -> None:
"""Writes the given output volumes to disk.

@@ -470,6 +474,8 @@
legacy : bool, default=False
If ``True``, the exporter will use the legacy calculation.
If ``False``, the exporter will use the new calculation by dividing with pixdims.
filename : Optional[str], default=None
The filename of the dataset item.

Returns
-------
@@ -489,18 +495,10 @@ def unnest_dict_to_list(d: Dict) -> List:
volumes = unnest_dict_to_list(output_volumes)
for volume in volumes:
img = nib.Nifti1Image(
dataobj=np.flip(volume.pixel_array, (0, 1, 2)).astype(np.int16),
dataobj=volume.pixel_array.astype(np.int16),
affine=volume.affine,
)
if legacy and volume.original_affine is not None:
orig_ornt = io_orientation(
volume.original_affine
) # Get orientation of current affine
img_ornt = io_orientation(volume.affine) # Get orientation of RAS affine
from_canonical = ornt_transform(
img_ornt, orig_ornt
) # Get transform from RAS to current affine
img = img.as_reoriented(from_canonical)
img = get_reoriented_nifti_image(img, volume)
if volume.from_raster_layer:
output_path = Path(output_dir) / f"{image_id}_{volume.class_name}_m.nii.gz"
else:
@@ -510,6 +508,27 @@ def unnest_dict_to_list(d: Dict) -> List:
nib.save(img=img, filename=output_path)


def get_reoriented_nifti_image(img: nib.Nifti1Image, volume: Dict) -> nib.Nifti1Image:
"""
Reorients the given NIfTI image based on the original affine.

Parameters
----------
img: nib.Nifti1Image
The NIfTI image to be reoriented
volume: Dict
The volume containing the affine and original affine
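
Returns
-------
nib.Nifti1Image
The reoriented NIfTI image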
"""
if volume.original_affine is not None:
img_ax_codes = nib.orientations.aff2axcodes(volume.affine)
@dorfmanrobert (Contributor) commented on Dec 11, 2024:

I don't think this is correct for pre-MED_2D files, because volume.affine is (or is at least expected to be) RAS.

So this flow will try to go from RAS -> original orientation.

But really it's meant to go from LPI -> original orientation. I think this is why dataobj=np.flip(volume.pixel_array, (0, 1, 2)).astype(np.int16) used to be invoked: to go from LPI to RAS, so that this logic would then make sense for pre-MED_2D files.

@JBWilkie (Collaborator, author) replied:

Updated

orig_ax_codes = nib.orientations.aff2axcodes(volume.original_affine)
img_ornt = nib.orientations.axcodes2ornt(img_ax_codes)
orig_ornt = nib.orientations.axcodes2ornt(orig_ax_codes)
transform = nib.orientations.ornt_transform(img_ornt, orig_ornt)
img = img.as_reoriented(transform)
return img
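
As a sanity check, here is a small self-contained sketch of the axcodes round trip performed above, using toy affines (the LPS original affine is illustrative):

```python
import nibabel as nib
import numpy as np

# A toy RAS volume (identity affine) to be put back into the orientation
# implied by an original affine, e.g. LPS.
data = np.zeros((4, 4, 4), dtype=np.int16)
img = nib.Nifti1Image(data, np.eye(4))
original_affine = np.diag([-1.0, -1.0, 1.0, 1.0])  # axcodes ('L', 'P', 'S')

img_ornt = nib.orientations.axcodes2ornt(nib.orientations.aff2axcodes(img.affine))
orig_ornt = nib.orientations.axcodes2ornt(
    nib.orientations.aff2axcodes(original_affine)
)
transform = nib.orientations.ornt_transform(img_ornt, orig_ornt)
reoriented = img.as_reoriented(transform)

print(nib.orientations.aff2axcodes(reoriented.affine))  # ('L', 'P', 'S')
```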


def shift_polygon_coords(
polygon: List[Dict], pixdim: List[Number], legacy: bool = False
) -> List:
51 changes: 35 additions & 16 deletions darwin/importer/formats/nifti.py
@@ -3,7 +3,7 @@
import warnings
from collections import OrderedDict, defaultdict
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Tuple, Any

from rich.console import Console

@@ -31,8 +31,7 @@

def parse_path(
path: Path,
legacy: bool = False,
remote_files_that_require_legacy_scaling: Optional[List] = [],
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> Optional[List[dt.AnnotationFile]]:
"""
Parses the given ``nifti`` file and returns a ``List[dt.AnnotationFile]`` with the parsed
@@ -42,9 +41,8 @@ def parse_path(
----------
path : Path
The ``Path`` to the ``nifti`` file.
legacy : bool, default: False
If ``True``, the function will not attempt to resize the annotations to isotropic pixel dimensions.
If ``False``, the function will resize the annotations to isotropic pixel dimensions.
remote_files_that_require_legacy_scaling : Dict[Path, Dict[str, Any]], default: {}
A dictionary of remote file full paths to their slot affine maps

Returns
-------
@@ -78,16 +76,20 @@ def parse_path(
return None
annotation_files = []
for nifti_annotation in nifti_annotations:
legacy = nifti_annotation["image"] in remote_files_that_require_legacy_scaling
remote_file_path = Path(nifti_annotation["image"])
if not str(remote_file_path).startswith("/"):
remote_file_path = Path("/" + str(remote_file_path))

annotation_file = _parse_nifti(
Path(nifti_annotation["label"]),
nifti_annotation["image"],
Path(nifti_annotation["image"]),
path,
class_map=nifti_annotation.get("class_map"),
mode=nifti_annotation.get("mode", "image"),
slot_names=nifti_annotation.get("slot_names", []),
is_mpr=nifti_annotation.get("is_mpr", False),
legacy=legacy,
remote_file_path=remote_file_path,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)
annotation_files.append(annotation_file)
return annotation_files
@@ -101,10 +103,16 @@ def _parse_nifti(
mode: str,
slot_names: List[str],
is_mpr: bool,
legacy: bool = False,
remote_file_path: Path,
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> dt.AnnotationFile:
img, pixdims = process_nifti(nib.load(nifti_path))
img, pixdims = process_nifti(
nib.load(nifti_path),
remote_file_path=remote_file_path,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)

legacy = remote_file_path in remote_files_that_require_legacy_scaling
processed_class_map = process_class_map(class_map)
video_annotations = []
if mode == "instances": # For each instance produce a video annotation
@@ -159,11 +167,12 @@ def _parse_nifti(
dt.AnnotationClass(class_name, "mask", "mask")
for class_name in class_map.values()
}

remote_path = "/" if str(filename.parent) == "." else filename.parent
filename = Path(filename.name)
return dt.AnnotationFile(
path=json_path,
filename=str(filename),
remote_path="/",
remote_path=str(remote_path),
annotation_classes=annotation_classes,
annotations=video_annotations,
slots=[
@@ -353,7 +362,7 @@ def nifti_to_video_polygon_annotation(
if len(all_frame_ids) == 1:
segments = [[all_frame_ids[0], all_frame_ids[0] + 1]]
elif len(all_frame_ids) > 1:
segments = [[min(all_frame_ids), max(all_frame_ids)]]
segments = [[min(all_frame_ids), max(all_frame_ids) + 1]]
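# Segments are half-open [start, end) intervals, hence the +1 on the final frame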
video_annotation = dt.make_video_annotation(
frame_annotations,
keyframes={f_id: True for f_id in all_frame_ids},
@@ -513,16 +522,22 @@ def correct_nifti_header_if_necessary(img_nii):
def process_nifti(
input_data: nib.nifti1.Nifti1Image,
ornt: Optional[List[List[float]]] = [[0.0, -1.0], [1.0, -1.0], [2.0, -1.0]],
remote_file_path: Path = Path("/"),
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> Tuple[np.ndarray, Tuple[float]]:
"""
Converts a nifti object of any orientation to the passed ornt orientation.
The default ornt is LPI.

Args:
input_data: nibabel nifti object.
ornt: (n,2) orientation array. It defines a transformation from RAS.
ornt: (n,2) orientation array. It defines a transformation to LPI.
ornt[N,1] is a flip of axis N of the array, where 1 means no flip and -1 means flip.
ornt[:,0] is the transpose that needs to be done to the implied array, as in arr.transpose(ornt[:,0]).
remote_file_path: Path
The full path of the remote file
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]]
A dictionary of remote file full paths to their slot affine maps

Returns:
data_array: pixel array with orientation ornt.
@@ -531,9 +546,13 @@
img = correct_nifti_header_if_necessary(input_data)
orig_ax_codes = nib.orientations.aff2axcodes(img.affine)
orig_ornt = nib.orientations.axcodes2ornt(orig_ax_codes)
if remote_file_path in remote_files_that_require_legacy_scaling:
slot_affine_map = remote_files_that_require_legacy_scaling[remote_file_path]
affine = slot_affine_map[next(iter(slot_affine_map))] # Take the 1st slot
ax_codes = nib.orientations.aff2axcodes(affine)
ornt = nib.orientations.axcodes2ornt(ax_codes)
transform = nib.orientations.ornt_transform(orig_ornt, ornt)
reoriented_img = img.as_reoriented(transform)

data_array = reoriented_img.get_fdata()
pixdims = reoriented_img.header.get_zooms()
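
For context, a hedged sketch of how _parse_nifti above calls this helper (the file paths and affine are illustrative):

```python
from pathlib import Path

import nibabel as nib
import numpy as np

from darwin.importer.formats.nifti import process_nifti

# Hypothetical local label file and the remote item it annotates.
nifti_path = Path("uploads/BRAINIX_NIFTI_ROI.nii.gz")
remote_file_path = Path("/BRAINIX_NIFTI_ROI.nii.gz")
remote_files_that_require_legacy_scaling = {
    remote_file_path: {
        "0": np.array(
            [[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]],
            dtype=np.float64,
        )
    }
}

# Legacy files are reoriented to their in-platform affine; all other files
# fall back to the default LPI ornt.
data_array, pixdims = process_nifti(
    nib.load(nifti_path),
    remote_file_path=remote_file_path,
    remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)
```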

33 changes: 28 additions & 5 deletions tests/darwin/dataset/remote_dataset_test.py
@@ -6,6 +6,7 @@
from pathlib import Path
from typing import Any, Dict
from unittest.mock import MagicMock, patch
import numpy as np

import orjson as json
import pytest
@@ -1906,7 +1907,14 @@ def mock_remote_files(self):
seq=1,
current_workflow_id=None,
path="/path/to/file",
slots=[{"metadata": {"medical": {"handler": "MONAI"}}}],
slots=[
{
"slot_name": "0",
"metadata": {
"medical": {"handler": "MONAI", "affine": [1, 0, 0, 0]}
},
}
],
layout={},
current_workflow=None,
),
@@ -1921,7 +1929,21 @@
seq=2,
current_workflow_id=None,
path="/path/to/file",
slots=[{"metadata": {"medical": {}}}],
slots=[
{
"slot_name": "0",
"metadata": {
"medical": {
"affine": [
[-1, 0, 0, 0],
[0, -1, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, 1],
]
}
},
}
],
layout={},
current_workflow=None,
),
@@ -1941,6 +1963,7 @@ def test_get_remote_files_that_require_legacy_scaling(
)

result = remote_dataset._get_remote_files_that_require_legacy_scaling()

assert len(result) == 1
assert result[0] == "/path/to/file/filename"
assert Path("/path/to/file/filename") in result
np.testing.assert_array_equal(
result[Path("/path/to/file/filename")]["0"], np.array([[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]) # type: ignore
)
20 changes: 17 additions & 3 deletions tests/darwin/importer/formats/import_nifti_test.py
@@ -190,7 +190,13 @@ def test_image_annotation_nifti_import_single_slot_to_mask_legacy(
with patch("darwin.importer.formats.nifti.zoom") as mock_zoom:
mock_zoom.side_effect = ndimage.zoom

remote_files_that_require_legacy_scaling = ["vol0 (1).nii"]
remote_files_that_require_legacy_scaling = {
Path("/vol0 (1).nii"): {
"0": np.array(
[[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]
)
}
}
annotation_files = parse_path(
path=upload_json,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
@@ -516,7 +522,11 @@ def test_parse_path_nifti_with_legacy_scaling():
)
adjust_nifti_label_filepath(nifti_annotation_filepath, nifti_filepath)
expected_annotations = parse_darwin_json(expected_annotations_filepath)
remote_files_that_require_legacy_scaling = ["BRAINIX_NIFTI_ROI.nii.gz"]
remote_files_that_require_legacy_scaling = {
Path("/BRAINIX_NIFTI_ROI.nii.gz"): {
"0": np.array([[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]])
}
}
parsed_annotations = parse_path(
nifti_annotation_filepath,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
Expand Down Expand Up @@ -551,9 +561,13 @@ def test_parse_path_nifti_without_legacy_scaling():
/ "no-legacy"
/ "BRAINIX_NIFTI_ROI.nii.json"
)
remote_files_that_require_legacy_scaling = {}
adjust_nifti_label_filepath(nifti_annotation_filepath, nifti_filepath)
expected_annotations = parse_darwin_json(expected_annotations_filepath)
parsed_annotations = parse_path(nifti_annotation_filepath, legacy=False)
parsed_annotations = parse_path(
nifti_annotation_filepath,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)
for frame_idx in expected_annotations.annotations[0].frames:
expected_annotation = (
expected_annotations.annotations[0].frames[frame_idx].data["paths"]