Geom api #7490

Closed
wants to merge 3 commits
5 changes: 5 additions & 0 deletions monai/transforms/spatial/array.py
@@ -268,6 +268,7 @@ def __call__( # type: ignore
align_corners: bool | None = None,
dtype: DtypeLike = None,
lazy: bool | None = None,
update_to_match: Sequence[NdarrayOrTensor] | None = None,
) -> torch.Tensor:
"""
Args:
@@ -321,6 +322,8 @@ def __call__( # type: ignore
original_fname = img.meta.get(Key.FILENAME_OR_OBJ, "resample_to_match_source")
img.meta = deepcopy(img_dst.meta)
img.meta[Key.FILENAME_OR_OBJ] = original_fname # keep the original name, the others are overwritten
for t in update_to_match or []:
t.apply_transform(dst_affine, lazy=False) # PROBLEM: this is in-place; we'd have to return the transforms
else:
if isinstance(img, MetaTensor) and isinstance(img_dst, MetaTensor):
original_fname = img.meta.get(Key.FILENAME_OR_OBJ, "resample_to_match_source")
@@ -329,6 +332,8 @@ def __call__( # type: ignore
meta_dict.pop(k, None)
img.meta.update(meta_dict)
img.meta[Key.FILENAME_OR_OBJ] = original_fname # keep the original name, the others are overwritten
for t in update_to_match or []:
t.apply_transform(img.get_latest_transform(), lazy=True) # Not a problem as it is lazy
return img


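For illustration, a usage sketch of the `update_to_match` argument added in this hunk. It is not part of the diff: the argument and the geometry type come from this proposal and PR #7488, `points` is a hypothetical placeholder, and the snippet will not run against released MONAI.

```python
# Sketch only: `update_to_match` and the geometry "kind" API are proposals from
# this PR and PR #7488; they do not exist in released MONAI.
import torch
from monai.data import MetaTensor
from monai.transforms import ResampleToMatch

img = MetaTensor(torch.rand(1, 32, 32, 32))      # moving image
img_dst = MetaTensor(torch.rand(1, 48, 48, 48))  # reference grid to match
points = ...  # hypothetical geometry tensor exposing `apply_transform`

resampled = ResampleToMatch()(
    img=img,
    img_dst=img_dst,
    update_to_match=[points],  # geometry objects are updated in place
)
```

The in-place update is exactly what the PROBLEM comment above flags: the caller's geometry objects are mutated rather than returned.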
73 changes: 64 additions & 9 deletions monai/transforms/spatial/dictionary.py
@@ -237,7 +237,7 @@ def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = No
lazy_ = self.lazy if lazy is None else lazy
d: dict = dict(data)
for key, mode, padding_mode, align_corners, dtype, dst_key in self.key_iterator(
d, self.mode, self.padding_mode, self.align_corners, self.dtype, self.dst_keys
d, KindType.PIXEL, self.mode, self.padding_mode, self.align_corners, self.dtype, self.dst_keys
):
d[key] = self.sp_transform(
img=d[key],
@@ -338,8 +338,16 @@ def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = No
"""
lazy_ = self.lazy if lazy is None else lazy
d = dict(data)

# get the first raster tensor (there must be at least one)
first_raster = self.first_key(data, kind=KindType.PIXEL)
if isinstance(first_raster, tuple) and len(first_raster) == 0:
raise ValueError(f"At least one of the specified keys must be of kind {KindType.PIXEL}.")

# OPTION 1:
# update the raster tensors using the resampler
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, self.mode, self.padding_mode, self.align_corners, self.dtype
d, KindType.PIXEL, self.mode, self.padding_mode, self.align_corners, self.dtype
):
d[key] = self.resampler(
img=d[key],
@@ -350,6 +358,27 @@ def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = No
dtype=dtype,
lazy=lazy_,
)

# update the geometry tensors from the first raster tensor
transform = d[first_raster].get_latest_transform()
for key in self.key_iterator(d, kind=KindType.GEOM):
d[key].apply_transform(transform)

# OPTION 2:
geometries = [d[key] for key in self.key_iterator(d, kind=KindType.GEOM)]
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, KindType.PIXEL, self.mode, self.padding_mode, self.align_corners, self.dtype
):
d[key] = self.resampler(
img=d[key],
img_dst=d[self.key_dst],
mode=mode,
padding_mode=padding_mode,
align_corners=align_corners,
dtype=dtype,
lazy=lazy_,
update_to_match=geometries if key == first_raster else None,
)
return d

def inverse(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]:
@@ -1627,7 +1656,11 @@ class RandAxisFlipd(RandomizableTransform, MapTransform, InvertibleTransform, La
backend = RandAxisFlip.backend

def __init__(
self, keys: KeysCollection, prob: float = 0.1, allow_missing_keys: bool = False, lazy: bool = False
self,
keys: KeysCollection,
prob: float = 0.1,
allow_missing_keys: bool = False,
lazy: bool = False,
) -> None:
MapTransform.__init__(self, keys, allow_missing_keys)
RandomizableTransform.__init__(self, prob)
@@ -1762,12 +1795,34 @@ def __call__(self, data: Mapping[Hashable, torch.Tensor], lazy: bool | None = No
"""
d = dict(data)
lazy_ = self.lazy if lazy is None else lazy
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, self.mode, self.padding_mode, self.align_corners, self.dtype
):
d[key] = self.rotator(
d[key], mode=mode, padding_mode=padding_mode, align_corners=align_corners, dtype=dtype, lazy=lazy_
)

# get the first raster tensor, if one is present
first = self.first_key(d, kind=KindType.PIXEL)
if isinstance(first, tuple) and len(first) == 0 and not self.rotator.keep_size:
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, None, self.mode, self.padding_mode, self.align_corners, self.dtype  # kind=None: iterate all keys
):
d[key] = self.rotator(
d[key], mode=mode, padding_mode=padding_mode, align_corners=align_corners, dtype=dtype, lazy=lazy_
)
else:
# pass 1: perform the rotation for the image (pixel) data to calculate the resulting spatial geometry
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, KindType.PIXEL, self.mode, self.padding_mode, self.align_corners, self.dtype
):
d[key] = self.rotator(
d[key], mode=mode, padding_mode=padding_mode, align_corners=align_corners, dtype=dtype, lazy=lazy_
)
# pass 2: perform the rotation for non-image data
for key, mode, padding_mode, align_corners, dtype in self.key_iterator(
d, KindType.GEOM, self.mode, self.padding_mode, self.align_corners, self.dtype
):
d[key] = self.rotator(
d[key], mode=mode, padding_mode=padding_mode, align_corners=align_corners, dtype=dtype, lazy=lazy_
)


return d

def inverse(self, data: Mapping[Hashable, torch.Tensor]) -> dict[Hashable, torch.Tensor]:
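Both options sketched in `ResampleToMatchd.__call__`, and the two-pass `Rotated.__call__` above, are meant to give the same user-facing behaviour. Below is an illustrative dictionary usage that is not part of the diff: the key names and the `img`/`seg`/`points`/`ref_img` variables are made up, and `KindType` plus the geometry tensor's `kind` attribute come from this proposal and PR #7488.

```python
# Sketch of the intended dictionary-transform usage; "landmarks" holds a
# hypothetical geometry tensor (kind == KindType.GEOM), the other keys hold
# pixel data (kind == KindType.PIXEL).
from monai.transforms import ResampleToMatchd

resample = ResampleToMatchd(
    keys=["image", "label", "landmarks"],  # mixed pixel and geometry keys
    key_dst="ref",                         # key of the reference image
)
out = resample({"image": img, "label": seg, "landmarks": points, "ref": ref_img})
# After the call, "landmarks" carries the same spatial transform that was
# applied to the first pixel key ("image"), so it stays aligned with it.
```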
23 changes: 18 additions & 5 deletions monai/transforms/transform.py
@@ -395,7 +395,17 @@ def __new__(cls, *args, **kwargs):
cls.inverse: Any = transforms.attach_hook(cls.inverse, transforms.InvertibleTransform.inverse_update)
return Transform.__new__(cls)

def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False) -> None:
def __init__(
self,
keys: KeysCollection,
allow_missing_keys: bool = False
) -> None:
"""
Args:
keys: a collection of keys that should be visited by this transform instance
allow_missing_keys: a flag indicating whether the transform tolerates keys missing from the input data
"""
# TODO: KindType is provided by https://github.com/Project-MONAI/MONAI/pull/7488
super().__init__()
self.keys: tuple[Hashable, ...] = ensure_tuple(keys)
self.allow_missing_keys = allow_missing_keys
@@ -453,7 +463,7 @@ def __call__(self, data):
"""
raise NotImplementedError(f"Subclass {self.__class__.__name__} must implement this method.")

def key_iterator(self, data: Mapping[Hashable, Any], *extra_iterables: Iterable | None) -> Generator:
def key_iterator(
self, data: Mapping[Hashable, Any], kind: KindType | None = None, *extra_iterables: Iterable | None
) -> Generator:
"""
Iterate across keys and optionally extra iterables. If key is missing, exception is raised if
`allow_missing_keys==False` (default). If `allow_missing_keys==True`, key is skipped.
@@ -470,14 +480,17 @@ def key_iterator(self, data: Mapping[Hashable, Any], *extra_iterables: Iterable
for key, *_ex_iters in zip(self.keys, *ex_iters):
# all normal, yield (what we yield depends on whether extra iterables were given)
if key in data:
yield (key,) + tuple(_ex_iters) if extra_iterables else key
if kind is None or data[key].kind == kind:
yield (key,) + tuple(_ex_iters) if extra_iterables else key
elif not self.allow_missing_keys:
raise KeyError(
f"Key `{key}` of transform `{self.__class__.__name__}` was missing in the data"
" and allow_missing_keys==False."
)

def first_key(self, data: dict[Hashable, Any]):


def first_key(self, data: dict[Hashable, Any], kind: KindType | None = None) -> Hashable | tuple:
"""
Get the first available key of `self.keys` in the input `data` dictionary.
If no available key, return an empty tuple `()`.
@@ -486,4 +499,4 @@ def first_key(self, data: dict[Hashable, Any]):
data: data that the transform will be applied to.

"""
return first(self.key_iterator(data), ())
return first(self.key_iterator(data, kind=kind), ())
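A minimal, self-contained sketch of the kind filtering that the new `kind` parameter adds to `key_iterator`. `Kind` and the `Item.kind` attribute below are stand-ins for the `KindType` API referenced from PR #7488; only the filtering logic mirrors the diff.

```python
# Standalone illustration of kind-filtered key iteration (assumed stand-in
# types, not MONAI code).
from dataclasses import dataclass
from enum import Enum, auto


class Kind(Enum):
    PIXEL = auto()
    GEOM = auto()


@dataclass
class Item:
    kind: Kind


def key_iterator(keys, data, kind=None, *extra_iterables):
    # zip keys with any extra iterables, then drop entries whose kind
    # does not match the requested one (None means "no filtering")
    ex_iters = extra_iterables if extra_iterables else [[None] * len(keys)]
    for key, *ex in zip(keys, *ex_iters):
        if key in data and (kind is None or data[key].kind == kind):
            yield (key, *ex) if extra_iterables else key


data = {"image": Item(Kind.PIXEL), "points": Item(Kind.GEOM)}
print(list(key_iterator(["image", "points"], data)))             # ['image', 'points']
print(list(key_iterator(["image", "points"], data, Kind.GEOM)))  # ['points']
print(list(key_iterator(["image", "points"], data, Kind.PIXEL, ["bilinear", "nearest"])))
# [('image', 'bilinear')]
```

Because `kind` sits before `*extra_iterables`, callers that pass extra iterables positionally (as the dictionary transforms above do) must also pass a kind value, even if it is `None`.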