This repository has been archived by the owner on Jan 12, 2024. It is now read-only.

Merge pull request #32 from PhoenixDL/resizing
Resizing Transform
mibaumgartner authored Feb 17, 2020
2 parents 7a1c8ba + 9499751 commit a160a05
Showing 3 changed files with 104 additions and 6 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

| Date | Commit | Short Description | Breaking Changes? |
| ---- | --------- | ----------------- | ----------------- |
| 2020-02-17 | 26d7c9432b90247f8b9e0ab3e6881e88fb7749d4 | Add Resizing Transform | No |
| 2020-01-03 | 6b9a7b2fdc7d0b894c0dfcfd94237845fe8b8672 | Affine Trafos | No |
| 2019-12-24 | 6b90197e89dedd7659073bf72037390231a1c278 | Use shared memory for progressive resizing | No |
| 2019-12-17 | 0b881f8e0ce85f380ecf458080c2a3f5cb8c3080 | User-Controllable call dispatch within the compose class | No |
102 changes: 98 additions & 4 deletions rising/transforms/affine.py
@@ -2,15 +2,17 @@
from rising.transforms.functional.affine import affine_image_transform
from rising.utils.affine import AffineParamType, \
    assemble_matrix_if_necessary, matrix_to_homogeneous, matrix_to_cartesian
from rising.utils.checktype import check_scalar
import torch
from typing import Sequence, Union
from typing import Sequence, Union, Iterable

__all__ = [
    'Affine',
    'StackedAffine',
    'Rotate',
    'Scale',
    'Translate'
    'Translate',
    'Resize',
]


@@ -583,5 +585,97 @@ def assemble_matrix(self, **data) -> torch.Tensor:

        return matrix_to_cartesian(whole_trafo)

# TODO: Add transforms around image center
# TODO: Add Resize Transform

class Resize(Scale):
    def __init__(self,
                 size: Union[int, Iterable],
                 keys: Sequence = ('data',),
                 grad: bool = False,
                 interpolation_mode: str = 'bilinear',
                 padding_mode: str = 'zeros',
                 align_corners: bool = False,
                 **kwargs):
        """
        Class performing a resizing affine transformation on a given
        sample dict.
        The transformation will be applied to all the dict entries specified
        in :attr:`keys`.

        Parameters
        ----------
        size : int, Iterable
            the target size. If int, this will be repeated for all the
            dimensions
        keys : Sequence
            keys which should be augmented
        grad : bool
            enable gradient computation inside transformation
        interpolation_mode : str
            interpolation mode to calculate output values
            'bilinear' | 'nearest'. Default: 'bilinear'
        padding_mode : str
            padding mode for outside grid values
            'zeros' | 'border' | 'reflection'. Default: 'zeros'
        align_corners : bool
            Geometrically, we consider the pixels of the input as squares
            rather than points. If set to True, the extrema (-1 and 1) are
            considered as referring to the center points of the input's
            corner pixels. If set to False, they are instead considered as
            referring to the corner points of the input's corner pixels,
            making the sampling more resolution agnostic.
        **kwargs :
            additional keyword arguments passed to the affine transform

        Note
        ----
        The offsets for shifting back and to origin are calculated on the
        entry matching the first item in :attr:`keys` for each batch

        Note
        ----
        The target size must be specified in x, y (,z) order and will be
        converted to (D,) H, W order internally
        """
        super().__init__(output_size=size,
                         scale=None,
                         keys=keys,
                         grad=grad,
                         adjust_size=False,
                         interpolation_mode=interpolation_mode,
                         padding_mode=padding_mode,
                         align_corners=align_corners,
                         **kwargs)

    def assemble_matrix(self, **data) -> torch.Tensor:
        """
        Handles the matrix assembly and calculates the scale factors for
        resizing

        Parameters
        ----------
        **data :
            the data to be transformed. Will be used to determine batchsize,
            dimensionality, dtype and device

        Returns
        -------
        torch.Tensor
            the (batched) transformation matrix
        """
        curr_img_size = data[self.keys[0]].shape[2:]

        was_scalar = check_scalar(self.output_size)

        if was_scalar:
            self.output_size = [self.output_size] * len(curr_img_size)

        self.scale = [self.output_size[i] / curr_img_size[-i]
                      for i in range(len(curr_img_size))]

        matrix = super().assemble_matrix(**data)

        if was_scalar:
            self.output_size = self.output_size[0]

        return matrix
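
A minimal usage sketch of the new transform (not part of this commit): it mirrors the test added below, but spells out the call on a standalone sample dict. The (N, C, H, W) batch layout and the 32 x 32 input size are assumptions made for illustration.

import torch
from rising.transforms.affine import Resize

# hypothetical sample: batch size 1, one channel, 32 x 32 spatial size
sample = {'data': torch.rand(1, 1, 32, 32)}

# resize the spatial dimensions to 5 x 4; 'data' is the default key
trafo = Resize((5, 4))
resized = trafo(**sample)['data']

print(resized.shape)  # expected: torch.Size([1, 1, 5, 4])
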
7 changes: 5 additions & 2 deletions tests/transforms/test_affine.py
@@ -1,6 +1,6 @@
import unittest
from rising.transforms.affine import Affine, StackedAffine, Translate, Rotate, \
    Scale
    Scale, Resize
import torch
from copy import deepcopy
from rising.utils.affine import matrix_to_cartesian, matrix_to_homogeneous
@@ -83,13 +83,16 @@ def test_affine_subtypes(self):
        trafos = [
            Scale(5),
            Rotate(45),
            Translate(10)
            Translate(10),
            Resize((5, 4))
        ]

        for trafo in trafos:
            with self.subTest(trafo=trafo):
                self.assertIsInstance(trafo(**sample)['data'], torch.Tensor)

        self.assertTupleEqual((5, 4), trafos[-1](**sample)['data'].shape[2:])


if __name__ == '__main__':
    unittest.main()
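
As a companion sketch (not part of the commit), the scalar-size path documented in the Resize docstring can be exercised the same way: an int target size is repeated for every spatial dimension. The sample layout is again an assumption.

import torch
from rising.transforms.affine import Resize

sample = {'data': torch.rand(1, 1, 32, 32)}  # assumed (N, C, H, W) layout

out = Resize(5)(**sample)['data']
assert out.shape[2:] == (5, 5)  # scalar size repeated for both spatial dims
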
