[DAR-4924][External] Resolving issues with import & export of NifTI annotations #979

Merged · 4 commits · Dec 17, 2024
Changes from all commits
29 changes: 22 additions & 7 deletions darwin/dataset/remote_dataset_v2.py
@@ -11,7 +11,7 @@
Tuple,
Union,
)

import numpy as np
from pydantic import ValidationError
from requests.models import Response

@@ -873,10 +873,15 @@ def register_multi_slotted(
print(f"Reistration complete. Check your items in the dataset: {self.slug}")
return results

def _get_remote_files_that_require_legacy_scaling(self) -> List[Path]:
def _get_remote_files_that_require_legacy_scaling(
self,
) -> Dict[str, Dict[str, Any]]:
"""
Get all remote files that have been scaled upon upload. These files require that
NifTI annotations are similarly scaled during import
NifTI annotations are similarly scaled during import.

The in-platform affines are returned for each legacy file, as this is required
to properly re-orient the annotations during import.

Parameters
----------
@@ -885,21 +890,31 @@ def _get_remote_files_that_require_legacy_scaling(self) -> List[Path]:

Returns
-------
List[Path]
A list of full remote paths of dataset items that require NifTI annotations to be scaled
Dict[str, Dict[str, Any]]
A dictionary of remote file full paths to their slot affine maps
"""
remote_files_that_require_legacy_scaling = []
remote_files_that_require_legacy_scaling = {}
remote_files = self.fetch_remote_files(
filters={"statuses": ["new", "annotate", "review", "complete", "archived"]}
)
for remote_file in remote_files:
if not remote_file.slots[0].get("metadata", {}).get("medical", {}):
continue
if not (
remote_file.slots[0]
.get("metadata", {})
.get("medical", {})
.get("handler")
):
remote_files_that_require_legacy_scaling.append(remote_file.full_path)
slot_affine_map = {}
for slot in remote_file.slots:
slot_affine_map[slot["slot_name"]] = np.array(
slot["metadata"]["medical"]["affine"],
dtype=np.float64,
)
remote_files_that_require_legacy_scaling[
Path(remote_file.full_path)
] = slot_affine_map

return remote_files_that_require_legacy_scaling

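For reference, here is a minimal sketch of the mapping this method now returns (the path, slot name, and affine values below are illustrative assumptions, not taken from the PR): each legacy item's full remote path maps to a dictionary of slot names to 4x4 affine arrays.

from pathlib import Path

import numpy as np

# Hypothetical example of the returned structure: one legacy dataset item with a
# single slot "0" whose in-platform affine flips the first two axes.
remote_files_that_require_legacy_scaling = {
    Path("/scans/ct_001.nii.gz"): {
        "0": np.array(
            [
                [-1.0, 0.0, 0.0, 0.0],
                [0.0, -1.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0],
                [0.0, 0.0, 0.0, 1.0],
            ],
            dtype=np.float64,
        )
    }
}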
63 changes: 51 additions & 12 deletions darwin/exporter/formats/nifti.py
@@ -25,7 +25,6 @@ def _console_theme() -> Theme:
console = Console(theme=_console_theme())
try:
import nibabel as nib
from nibabel.orientations import io_orientation, ornt_transform
except ImportError:
import_fail_string = r"""
You must install darwin-py with pip install darwin-py\[medical]
@@ -128,7 +127,11 @@ def export(
polygon_annotations, slot_map, output_volumes, legacy=legacy
)
write_output_volume_to_disk(
output_volumes, image_id=image_id, output_dir=output_dir, legacy=legacy
output_volumes,
image_id=image_id,
output_dir=output_dir,
legacy=legacy,
filename=video_annotation.filename,
)
# Need to map raster layers to SeriesInstanceUIDs
if mask_present:
@@ -161,6 +164,7 @@ def export(
image_id=image_id,
output_dir=output_dir,
legacy=legacy,
filename=video_annotation.filename,
)


@@ -456,6 +460,7 @@ def write_output_volume_to_disk(
image_id: str,
output_dir: Union[str, Path],
legacy: bool = False,
filename: str = None,
) -> None:
"""Writes the given output volumes to disk.

@@ -470,6 +475,8 @@
legacy : bool, default=False
If ``True``, the exporter will use the legacy calculation.
If ``False``, the exporter will use the new calculation by dividing with pixdims.
filename: str
The filename of the dataset item

Returns
-------
@@ -489,18 +496,10 @@ def unnest_dict_to_list(d: Dict) -> List:
volumes = unnest_dict_to_list(output_volumes)
for volume in volumes:
img = nib.Nifti1Image(
dataobj=np.flip(volume.pixel_array, (0, 1, 2)).astype(np.int16),
dataobj=volume.pixel_array.astype(np.int16),
affine=volume.affine,
)
if legacy and volume.original_affine is not None:
orig_ornt = io_orientation(
volume.original_affine
) # Get orientation of current affine
img_ornt = io_orientation(volume.affine) # Get orientation of RAS affine
from_canonical = ornt_transform(
img_ornt, orig_ornt
) # Get transform from RAS to current affine
img = img.as_reoriented(from_canonical)
img = _get_reoriented_nifti_image(img, volume, legacy, filename)
if volume.from_raster_layer:
output_path = Path(output_dir) / f"{image_id}_{volume.class_name}_m.nii.gz"
else:
@@ -510,6 +509,46 @@ def unnest_dict_to_list(d: Dict) -> List:
nib.save(img=img, filename=output_path)


def _get_reoriented_nifti_image(
img: nib.Nifti1Image, volume: Dict, legacy: bool, filename: str
) -> nib.Nifti1Image:
"""
Reorients the given NIfTI image based on the affine of the originally uploaded file.

Files that were uploaded before the `MED_2D_VIEWER` feature are `legacy`. Non-legacy
files are uploaded and re-oriented to the `LPI` orientation. Legacy NifTI
files were treated differently. These files were re-oriented to `LPI`, but their
affine was stored as `RAS`, which is the opposite orientation. We therefore need to
flip the axes of these images to ensure alignment.

Parameters
----------
img: nib.Nifti1Image
The NIfTI image to be reoriented
volume: Dict
The volume containing the affine and original affine
legacy: bool
If ``True``, the exporter will flip all axes of the image if the dataset item
is not a DICOM
If ``False``, the exporter will not flip the axes
filename: str
The filename of the dataset item
"""
if volume.original_affine is not None:
img_ax_codes = nib.orientations.aff2axcodes(volume.affine)
Review comment — @dorfmanrobert (Contributor), Dec 11, 2024:
I don't think this is correct for pre-MED_2D files, because volume.affine is (or is at least expected to be) RAS.
So this flow will try to go from RAS -> original orientation.
But really it's meant to go from LPI -> original orientation. I think this is why dataobj=np.flip(volume.pixel_array, (0, 1, 2)).astype(np.int16) used to be invoked: to go from LPI to RAS, so that this logic would then make sense for pre-MED_2D files.

Reply from the author (Collaborator): Updated

orig_ax_codes = nib.orientations.aff2axcodes(volume.original_affine)
img_ornt = nib.orientations.axcodes2ornt(img_ax_codes)
orig_ornt = nib.orientations.axcodes2ornt(orig_ax_codes)
transform = nib.orientations.ornt_transform(img_ornt, orig_ornt)
img = img.as_reoriented(transform)
is_dicom = filename.lower().endswith(".dcm")
if legacy and not is_dicom:
img = nib.Nifti1Image(
np.flip(img.get_fdata(), (0, 1, 2)).astype(np.int16), img.affine
)
return img


def shift_polygon_coords(
polygon: List[Dict], pixdim: List[Number], legacy: bool = False
) -> List:
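To make the orientation handling discussed in the review thread above easier to follow, here is a small self-contained sketch (the helper name and dummy volume are assumptions for illustration, not code from this PR) of how nibabel's orientation utilities map an image from one anatomical convention to another, for example RAS to LPI:

import numpy as np
import nibabel as nib

def reorient_to(img: nib.Nifti1Image, target_axcodes=("L", "P", "I")) -> nib.Nifti1Image:
    # Orientation implied by the image's current affine
    current_ornt = nib.orientations.io_orientation(img.affine)
    # Orientation we want the voxel axes to follow
    target_ornt = nib.orientations.axcodes2ornt(target_axcodes)
    transform = nib.orientations.ornt_transform(current_ornt, target_ornt)
    return img.as_reoriented(transform)

# A dummy RAS volume (identity affine) flipped to LPI: all three axes are inverted.
ras_img = nib.Nifti1Image(np.zeros((4, 4, 4), dtype=np.int16), affine=np.eye(4))
lpi_img = reorient_to(ras_img)
print(nib.orientations.aff2axcodes(lpi_img.affine))  # ('L', 'P', 'I')

_get_reoriented_nifti_image applies the same kind of transform, but derives the target orientation from volume.original_affine and additionally flips all three axes for legacy non-DICOM items, whose stored affine is RAS while their pixel data is LPI.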
67 changes: 49 additions & 18 deletions darwin/importer/formats/nifti.py
@@ -3,7 +3,7 @@
import warnings
from collections import OrderedDict, defaultdict
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Tuple, Any

from rich.console import Console

@@ -31,8 +31,7 @@

def parse_path(
path: Path,
legacy: bool = False,
remote_files_that_require_legacy_scaling: Optional[List] = [],
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> Optional[List[dt.AnnotationFile]]:
"""
Parses the given ``nifti`` file and returns a ``List[dt.AnnotationFile]`` with the parsed
@@ -42,9 +41,8 @@ def parse_path(
----------
path : Path
The ``Path`` to the ``nifti`` file.
legacy : bool, default: False
If ``True``, the function will not attempt to resize the annotations to isotropic pixel dimensions.
If ``False``, the function will resize the annotations to isotropic pixel dimensions.
remote_files_that_require_legacy_scaling : Optional[Dict[Path, Dict[str, Any]]]
A dictionary of remote file full paths to their slot affine maps

Returns
-------
@@ -78,16 +76,20 @@ def parse_path(
return None
annotation_files = []
for nifti_annotation in nifti_annotations:
legacy = nifti_annotation["image"] in remote_files_that_require_legacy_scaling
remote_file_path = Path(nifti_annotation["image"])
if not str(remote_file_path).startswith("/"):
remote_file_path = Path("/" + str(remote_file_path))

annotation_file = _parse_nifti(
Path(nifti_annotation["label"]),
nifti_annotation["image"],
Path(nifti_annotation["image"]),
path,
class_map=nifti_annotation.get("class_map"),
mode=nifti_annotation.get("mode", "image"),
slot_names=nifti_annotation.get("slot_names", []),
is_mpr=nifti_annotation.get("is_mpr", False),
legacy=legacy,
remote_file_path=remote_file_path,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)
annotation_files.append(annotation_file)
return annotation_files
@@ -101,10 +103,16 @@ def _parse_nifti(
mode: str,
slot_names: List[str],
is_mpr: bool,
legacy: bool = False,
remote_file_path: Path,
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> dt.AnnotationFile:
img, pixdims = process_nifti(nib.load(nifti_path))
img, pixdims = process_nifti(
nib.load(nifti_path),
remote_file_path=remote_file_path,
remote_files_that_require_legacy_scaling=remote_files_that_require_legacy_scaling,
)

legacy = remote_file_path in remote_files_that_require_legacy_scaling
processed_class_map = process_class_map(class_map)
video_annotations = []
if mode == "instances": # For each instance produce a video annotation
@@ -159,11 +167,12 @@ def _parse_nifti(
dt.AnnotationClass(class_name, "mask", "mask")
for class_name in class_map.values()
}

remote_path = "/" if filename.parent == "." else filename.parent
filename = Path(filename.name)
return dt.AnnotationFile(
path=json_path,
filename=str(filename),
remote_path="/",
remote_path=str(remote_path),
annotation_classes=annotation_classes,
annotations=video_annotations,
slots=[
@@ -353,7 +362,7 @@ def nifti_to_video_polygon_annotation(
if len(all_frame_ids) == 1:
segments = [[all_frame_ids[0], all_frame_ids[0] + 1]]
elif len(all_frame_ids) > 1:
segments = [[min(all_frame_ids), max(all_frame_ids)]]
segments = [[min(all_frame_ids), max(all_frame_ids) + 1]]
video_annotation = dt.make_video_annotation(
frame_annotations,
keyframes={f_id: True for f_id in all_frame_ids},
@@ -513,16 +522,33 @@ def correct_nifti_header_if_necessary(img_nii):
def process_nifti(
input_data: nib.nifti1.Nifti1Image,
ornt: Optional[List[List[float]]] = [[0.0, -1.0], [1.0, -1.0], [2.0, -1.0]],
remote_file_path: Path = Path("/"),
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]] = {},
) -> Tuple[np.ndarray, Tuple[float]]:
"""
Converts a nifti object of any orientation to the passed ornt orientation.
Converts a NifTI object of any orientation to the passed ornt orientation.
The default ornt is LPI.

Files that were uploaded before the `MED_2D_VIEWER` feature are `legacy`. Non-legacy
files are uploaded and re-oriented to the `LPI` orientation. Legacy files
were treated differently:
- Legacy NifTI files were re-oriented to `LPI`, but their
affine was stored as `RAS`, which is the opposite orientation. However, because
their pixel data is stored in `LPI`, we can treat them the same way as non-legacy
files.
- Legacy DICOM files were not always re-oriented to `LPI`. We therefore use the
affine of the dataset item from `slot_affine_map` to re-orient the NifTI file to
be imported

Args:
input_data: nibabel nifti object.
ornt: (n,2) orientation array. It defines a transformation from RAS.
input_data: nibabel NifTI object.
ornt: (n,2) orientation array. It defines a transformation to LPI
ornt[N,1] is a flip of axis N of the array, where 1 means no flip and -1 means flip.
ornt[:,0] is the transpose that needs to be done to the implied array, as in arr.transpose(ornt[:,0]).
remote_file_path: Path
The full path of the remote file
remote_files_that_require_legacy_scaling: Dict[Path, Dict[str, Any]]
A dictionary of remote file full paths to their slot affine maps

Returns:
data_array: pixel array with orientation ornt.
@@ -531,9 +557,14 @@
img = correct_nifti_header_if_necessary(input_data)
orig_ax_codes = nib.orientations.aff2axcodes(img.affine)
orig_ornt = nib.orientations.axcodes2ornt(orig_ax_codes)
is_dicom = remote_file_path.suffix.lower() == ".dcm"
if remote_file_path in remote_files_that_require_legacy_scaling and is_dicom:
slot_affine_map = remote_files_that_require_legacy_scaling[remote_file_path]
affine = slot_affine_map[next(iter(slot_affine_map))] # Take the 1st slot
ax_codes = nib.orientations.aff2axcodes(affine)
ornt = nib.orientations.axcodes2ornt(ax_codes)
transform = nib.orientations.ornt_transform(orig_ornt, ornt)
reoriented_img = img.as_reoriented(transform)

data_array = reoriented_img.get_fdata()
pixdims = reoriented_img.header.get_zooms()

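As a quick illustration of the default ornt argument to process_nifti (a standalone sketch; only the array values come from the signature above, the rest is assumed): [[0, -1], [1, -1], [2, -1]] applies no transpose and flips every axis, so an RAS-ordered array becomes LPI.

import numpy as np
import nibabel as nib

ornt = np.array([[0.0, -1.0], [1.0, -1.0], [2.0, -1.0]])  # default used by process_nifti
arr = np.arange(8).reshape((2, 2, 2))
flipped = nib.orientations.apply_orientation(arr, ornt)
# Flipping every axis with no transpose is equivalent to reversing all three dimensions.
assert np.array_equal(flipped, np.flip(arr, (0, 1, 2)))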