2023-06-30 nightly release (0e49615)
chronos_secgrp_pytorch_oss_ci_oncall committed Jun 30, 2023
1 parent c8848d1 commit 1dc8e77
Showing 5 changed files with 453 additions and 416 deletions.
124 changes: 0 additions & 124 deletions test/test_transforms_v2.py
@@ -668,130 +668,6 @@ def test_boundingbox_spatial_size(self, angle, expand):
assert out_img.spatial_size == out_bbox.spatial_size


class TestRandomAffine:
def test_assertions(self):
with pytest.raises(ValueError, match="is a single number, it must be positive"):
transforms.RandomAffine(-0.7)

for d in [[-0.7], [-0.7, 0, 0.7]]:
with pytest.raises(ValueError, match="degrees should be a sequence of length 2"):
transforms.RandomAffine(d)

with pytest.raises(TypeError, match="Got inappropriate fill arg"):
transforms.RandomAffine(12, fill="abc")

with pytest.raises(TypeError, match="Got inappropriate fill arg"):
transforms.RandomAffine(12, fill="abc")

for kwargs in [
{"center": 12},
{"translate": 12},
{"scale": 12},
]:
with pytest.raises(TypeError, match="should be a sequence of length"):
transforms.RandomAffine(12, **kwargs)

for kwargs in [{"center": [1, 2, 3]}, {"translate": [1, 2, 3]}, {"scale": [1, 2, 3]}]:
with pytest.raises(ValueError, match="should be a sequence of length"):
transforms.RandomAffine(12, **kwargs)

with pytest.raises(ValueError, match="translation values should be between 0 and 1"):
transforms.RandomAffine(12, translate=[-1.0, 2.0])

with pytest.raises(ValueError, match="scale values should be positive"):
transforms.RandomAffine(12, scale=[-1.0, 2.0])

with pytest.raises(ValueError, match="is a single number, it must be positive"):
transforms.RandomAffine(12, shear=-10)

for s in [[-0.7], [-0.7, 0, 0.7]]:
with pytest.raises(ValueError, match="shear should be a sequence of length 2"):
transforms.RandomAffine(12, shear=s)

@pytest.mark.parametrize("degrees", [23, [0, 45], (0, 45)])
@pytest.mark.parametrize("translate", [None, [0.1, 0.2]])
@pytest.mark.parametrize("scale", [None, [0.7, 1.2]])
@pytest.mark.parametrize("shear", [None, 2.0, [5.0, 15.0], [1.0, 2.0, 3.0, 4.0]])
def test__get_params(self, degrees, translate, scale, shear, mocker):
image = mocker.MagicMock(spec=datapoints.Image)
image.num_channels = 3
image.spatial_size = (24, 32)
h, w = image.spatial_size

transform = transforms.RandomAffine(degrees, translate=translate, scale=scale, shear=shear)
params = transform._get_params([image])

if not isinstance(degrees, (list, tuple)):
assert -degrees <= params["angle"] <= degrees
else:
assert degrees[0] <= params["angle"] <= degrees[1]

if translate is not None:
w_max = int(round(translate[0] * w))
h_max = int(round(translate[1] * h))
assert -w_max <= params["translate"][0] <= w_max
assert -h_max <= params["translate"][1] <= h_max
else:
assert params["translate"] == (0, 0)

if scale is not None:
assert scale[0] <= params["scale"] <= scale[1]
else:
assert params["scale"] == 1.0

if shear is not None:
if isinstance(shear, float):
assert -shear <= params["shear"][0] <= shear
assert params["shear"][1] == 0.0
elif len(shear) == 2:
assert shear[0] <= params["shear"][0] <= shear[1]
assert params["shear"][1] == 0.0
else:
assert shear[0] <= params["shear"][0] <= shear[1]
assert shear[2] <= params["shear"][1] <= shear[3]
else:
assert params["shear"] == (0, 0)

@pytest.mark.parametrize("degrees", [23, [0, 45], (0, 45)])
@pytest.mark.parametrize("translate", [None, [0.1, 0.2]])
@pytest.mark.parametrize("scale", [None, [0.7, 1.2]])
@pytest.mark.parametrize("shear", [None, 2.0, [5.0, 15.0], [1.0, 2.0, 3.0, 4.0]])
@pytest.mark.parametrize("fill", [0, [1, 2, 3], (2, 3, 4)])
@pytest.mark.parametrize("center", [None, [2.0, 3.0]])
def test__transform(self, degrees, translate, scale, shear, fill, center, mocker):
interpolation = InterpolationMode.BILINEAR
transform = transforms.RandomAffine(
degrees,
translate=translate,
scale=scale,
shear=shear,
interpolation=interpolation,
fill=fill,
center=center,
)

if isinstance(degrees, (tuple, list)):
assert transform.degrees == [float(degrees[0]), float(degrees[1])]
else:
assert transform.degrees == [float(-degrees), float(degrees)]

fn = mocker.patch("torchvision.transforms.v2.functional.affine")
inpt = mocker.MagicMock(spec=datapoints.Image)
inpt.num_channels = 3
inpt.spatial_size = (24, 32)

# vfdev-5, Feature Request: let's store params as Transform attribute
# This could also be helpful for users
# Otherwise, we can mock transform._get_params
torch.manual_seed(12)
_ = transform(inpt)
torch.manual_seed(12)
params = transform._get_params([inpt])

fill = transforms._utils._convert_fill_arg(fill)
fn.assert_called_once_with(inpt, **params, interpolation=interpolation, fill=fill, center=center)


class TestRandomCrop:
def test_assertions(self):
with pytest.raises(ValueError, match="Please provide only two dimensions"):
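
The removed TestRandomAffine tests above cover argument validation, random parameter sampling, and dispatch to the functional affine kernel. For orientation, here is a minimal usage sketch of the public torchvision.transforms.v2.RandomAffine API that those tests exercised; the parameter values are illustrative, not taken from the deleted tests.

# Minimal usage sketch of transforms.RandomAffine (illustrative values, not the removed test).
import torch
from torchvision.transforms import InterpolationMode
from torchvision.transforms import v2 as transforms

affine = transforms.RandomAffine(
    degrees=(0, 45),        # rotation angle sampled uniformly from [0, 45]
    translate=(0.1, 0.2),   # max horizontal/vertical shift as a fraction of width/height
    scale=(0.7, 1.2),       # uniform scale factor sampled from this range
    shear=(5.0, 15.0),      # x-axis shear angle sampled from this range
    interpolation=InterpolationMode.BILINEAR,
    fill=0,                 # fill value for the area outside the transformed image
)

image = torch.rand(3, 24, 32)   # C, H, W dummy image
output = affine(image)          # same shape, randomly transformed
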
83 changes: 0 additions & 83 deletions test/test_transforms_v2_functional.py
@@ -665,77 +665,6 @@ def _compute_affine_matrix(angle_, translate_, scale_, shear_, center_):
return true_matrix


@pytest.mark.parametrize("device", cpu_and_cuda())
def test_correctness_affine_bounding_box_on_fixed_input(device):
# Check transformation against known expected output
format = datapoints.BoundingBoxFormat.XYXY
spatial_size = (64, 64)
in_boxes = [
[20, 25, 35, 45],
[50, 5, 70, 22],
[spatial_size[1] // 2 - 10, spatial_size[0] // 2 - 10, spatial_size[1] // 2 + 10, spatial_size[0] // 2 + 10],
[1, 1, 5, 5],
]
in_boxes = torch.tensor(in_boxes, dtype=torch.float64, device=device)
# Tested parameters
angle = 63
scale = 0.89
dx = 0.12
dy = 0.23

# Expected bboxes computed using albumentations:
# from albumentations.augmentations.geometric.functional import bbox_shift_scale_rotate
# from albumentations.augmentations.geometric.functional import normalize_bbox, denormalize_bbox
# expected_bboxes = []
# for in_box in in_boxes:
# n_in_box = normalize_bbox(in_box, *spatial_size)
# n_out_box = bbox_shift_scale_rotate(n_in_box, -angle, scale, dx, dy, *spatial_size)
# out_box = denormalize_bbox(n_out_box, *spatial_size)
# expected_bboxes.append(out_box)
expected_bboxes = [
(24.522435977922218, 34.375689508290854, 46.443125279998114, 54.3516575015695),
(54.88288587110401, 50.08453280875634, 76.44484547743795, 72.81332520036864),
(27.709526487041554, 34.74952648704156, 51.650473512958435, 58.69047351295844),
(48.56528888843238, 9.611532109828834, 53.35347829361575, 14.39972151501221),
]

expected_bboxes = clamp_bounding_box(
datapoints.BoundingBox(expected_bboxes, format="XYXY", spatial_size=spatial_size)
).tolist()

output_boxes = F.affine_bounding_box(
in_boxes,
format=format,
spatial_size=spatial_size,
angle=angle,
translate=(dx * spatial_size[1], dy * spatial_size[0]),
scale=scale,
shear=(0, 0),
)

torch.testing.assert_close(output_boxes.tolist(), expected_bboxes)

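The expected boxes in the test above (cross-checked against albumentations) follow the usual recipe for affinely transforming an axis-aligned box: map its four corners through the rotation/scale/translation and re-box the result with per-axis min/max. The sketch below reimplements that recipe in plain Python for illustration; the rotation sign convention is an assumption, and this is not the kernel that F.affine_bounding_box uses internally.

# Illustrative recipe: affine-transform an XYXY box by mapping its corners
# about `center`, then taking the min/max to get a new axis-aligned box.
import math

def affine_xyxy_box(box, angle_deg, translate, scale, center):
    x1, y1, x2, y2 = box
    cx, cy = center
    tx, ty = translate
    a = math.radians(angle_deg)      # sign convention assumed; may differ from torchvision's
    cos_a, sin_a = math.cos(a) * scale, math.sin(a) * scale
    corners = [(x1, y1), (x2, y1), (x2, y2), (x1, y2)]
    mapped = [
        (cos_a * (x - cx) - sin_a * (y - cy) + cx + tx,
         sin_a * (x - cx) + cos_a * (y - cy) + cy + ty)
        for x, y in corners
    ]
    xs, ys = zip(*mapped)
    return [min(xs), min(ys), max(xs), max(ys)]
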

@pytest.mark.parametrize("device", cpu_and_cuda())
def test_correctness_affine_segmentation_mask_on_fixed_input(device):
# Check transformation against known expected output on CPU and CUDA devices

# Create a fixed input segmentation mask with two square masks
# in the top-left and bottom-left corners
mask = torch.zeros(1, 32, 32, dtype=torch.long, device=device)
mask[0, 2:10, 2:10] = 1
mask[0, 32 - 9 : 32 - 3, 3:9] = 2

# Rotate 90 degrees and scale
expected_mask = torch.rot90(mask, k=-1, dims=(-2, -1))
expected_mask = torch.nn.functional.interpolate(expected_mask[None, :].float(), size=(64, 64), mode="nearest")
expected_mask = expected_mask[0, :, 16 : 64 - 16, 16 : 64 - 16].long()

out_mask = F.affine_mask(mask, 90, [0.0, 0.0], 64.0 / 32.0, [0.0, 0.0])

torch.testing.assert_close(out_mask, expected_mask)

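The expected mask above is assembled from plain torch ops: a clockwise rot90, a 2x nearest-neighbor upsample, and a center crop back to 32x32. The essential detail is the nearest-neighbor resampling, which keeps the integer label ids intact; the short check below illustrates that idea (a generic sketch, not part of the removed test).

# Nearest-neighbor resampling preserves discrete label ids; bilinear would blend
# them into meaningless intermediate values at mask boundaries.
import torch
import torch.nn.functional as nnf

mask = torch.zeros(1, 1, 8, 8)
mask[..., :4, :4] = 3        # one labelled square
nearest = nnf.interpolate(mask, scale_factor=2, mode="nearest")
bilinear = nnf.interpolate(mask, scale_factor=2, mode="bilinear", align_corners=False)
assert set(nearest.unique().tolist()) == {0.0, 3.0}   # labels preserved
assert len(bilinear.unique()) > 2                     # labels smeared at the edges
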

@pytest.mark.parametrize("angle", range(-90, 90, 56))
@pytest.mark.parametrize("expand, center", [(True, None), (False, None), (False, (12, 14))])
def test_correctness_rotate_bounding_box(angle, expand, center):
@@ -950,18 +879,6 @@ def test_correctness_crop_bounding_box(device, format, top, left, height, width,
torch.testing.assert_close(output_spatial_size, spatial_size)


@pytest.mark.parametrize("device", cpu_and_cuda())
def test_correctness_horizontal_flip_segmentation_mask_on_fixed_input(device):
mask = torch.zeros((3, 3, 3), dtype=torch.long, device=device)
mask[:, :, 0] = 1

out_mask = F.horizontal_flip_mask(mask)

expected_mask = torch.zeros((3, 3, 3), dtype=torch.long, device=device)
expected_mask[:, :, -1] = 1
torch.testing.assert_close(out_mask, expected_mask)

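The horizontal (and vertical) flip correctness tests reduce to reversing one spatial axis of the mask tensor. A minimal cross-check with plain torch ops (illustrative, not the kernel implementation):

# Horizontal flip of a mask is a reversal of the last (width) dimension.
import torch

mask = torch.zeros(3, 3, 3, dtype=torch.long)
mask[:, :, 0] = 1
flipped = torch.flip(mask, dims=[-1])     # column 0 moves to the last column
assert (flipped[:, :, -1] == 1).all() and flipped[:, :, 0].sum() == 0
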

@pytest.mark.parametrize("device", cpu_and_cuda())
def test_correctness_vertical_flip_segmentation_mask_on_fixed_input(device):
mask = torch.zeros((3, 3, 3), dtype=torch.long, device=device)