Commit
2023-06-28 nightly release (25c8a3a)
chronos_secgrp_pytorch_oss_ci_oncall committed Jun 28, 2023
1 parent 745b371 commit 5ca14b6
Showing 9 changed files with 333 additions and 308 deletions.
154 changes: 154 additions & 0 deletions test/test_datapoints.py
@@ -1,5 +1,8 @@
from copy import deepcopy

import pytest
import torch
from common_utils import assert_equal
from PIL import Image

from torchvision import datapoints
@@ -30,3 +33,154 @@ def test_bbox_instance(data, format):
    if isinstance(format, str):
        format = datapoints.BoundingBoxFormat[(format.upper())]
    assert bboxes.format == format


@pytest.mark.parametrize(
("data", "input_requires_grad", "expected_requires_grad"),
[
([[[0.0, 1.0], [0.0, 1.0]]], None, False),
([[[0.0, 1.0], [0.0, 1.0]]], False, False),
([[[0.0, 1.0], [0.0, 1.0]]], True, True),
(torch.rand(3, 16, 16, requires_grad=False), None, False),
(torch.rand(3, 16, 16, requires_grad=False), False, False),
(torch.rand(3, 16, 16, requires_grad=False), True, True),
(torch.rand(3, 16, 16, requires_grad=True), None, True),
(torch.rand(3, 16, 16, requires_grad=True), False, False),
(torch.rand(3, 16, 16, requires_grad=True), True, True),
],
)
def test_new_requires_grad(data, input_requires_grad, expected_requires_grad):
    datapoint = datapoints.Image(data, requires_grad=input_requires_grad)
    assert datapoint.requires_grad is expected_requires_grad


def test_isinstance():
    assert isinstance(datapoints.Image(torch.rand(3, 16, 16)), torch.Tensor)


def test_wrapping_no_copy():
    tensor = torch.rand(3, 16, 16)
    image = datapoints.Image(tensor)

    assert image.data_ptr() == tensor.data_ptr()


def test_to_wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16))

    image_to = image.to(torch.float64)

    assert type(image_to) is datapoints.Image
    assert image_to.dtype is torch.float64


def test_to_datapoint_reference():
    tensor = torch.rand((3, 16, 16), dtype=torch.float64)
    image = datapoints.Image(tensor)

    tensor_to = tensor.to(image)

    assert type(tensor_to) is torch.Tensor
    assert tensor_to.dtype is torch.float64


def test_clone_wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16))

    image_clone = image.clone()

    assert type(image_clone) is datapoints.Image
    assert image_clone.data_ptr() != image.data_ptr()


def test_requires_grad__wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16))

    assert not image.requires_grad

    image_requires_grad = image.requires_grad_(True)

    assert type(image_requires_grad) is datapoints.Image
    assert image.requires_grad
    assert image_requires_grad.requires_grad


def test_detach_wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16), requires_grad=True)

    image_detached = image.detach()

    assert type(image_detached) is datapoints.Image


def test_other_op_no_wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16))

    # any operation besides the ones listed in `Datapoint._NO_WRAPPING_EXCEPTIONS` will do here
    output = image * 2

    assert type(output) is torch.Tensor


@pytest.mark.parametrize(
"op",
[
lambda t: t.numpy(),
lambda t: t.tolist(),
lambda t: t.max(dim=-1),
],
)
def test_no_tensor_output_op_no_wrapping(op):
    image = datapoints.Image(torch.rand(3, 16, 16))

    output = op(image)

    assert type(output) is not datapoints.Image


def test_inplace_op_no_wrapping():
    image = datapoints.Image(torch.rand(3, 16, 16))

    output = image.add_(0)

    assert type(output) is torch.Tensor
    assert type(image) is datapoints.Image


def test_wrap_like():
    image = datapoints.Image(torch.rand(3, 16, 16))

    # any operation besides the ones listed in `Datapoint._NO_WRAPPING_EXCEPTIONS` will do here
    output = image * 2

    image_new = datapoints.Image.wrap_like(image, output)

    assert type(image_new) is datapoints.Image
    assert image_new.data_ptr() == output.data_ptr()


@pytest.mark.parametrize(
"datapoint",
[
datapoints.Image(torch.rand(3, 16, 16)),
datapoints.Video(torch.rand(2, 3, 16, 16)),
datapoints.BoundingBox([0.0, 1.0, 2.0, 3.0], format=datapoints.BoundingBoxFormat.XYXY, spatial_size=(10, 10)),
datapoints.Mask(torch.randint(0, 256, (16, 16), dtype=torch.uint8)),
],
)
@pytest.mark.parametrize("requires_grad", [False, True])
def test_deepcopy(datapoint, requires_grad):
    if requires_grad and not datapoint.dtype.is_floating_point:
        return

    datapoint.requires_grad_(requires_grad)

    datapoint_deepcopied = deepcopy(datapoint)

    assert datapoint_deepcopied is not datapoint
    assert datapoint_deepcopied.data_ptr() != datapoint.data_ptr()
    assert_equal(datapoint_deepcopied, datapoint)

    assert type(datapoint_deepcopied) is type(datapoint)
    assert datapoint_deepcopied.requires_grad is requires_grad
    assert datapoint_deepcopied.is_leaf
133 changes: 0 additions & 133 deletions test/test_prototype_datapoints.py

This file was deleted.

53 changes: 0 additions & 53 deletions test/test_transforms_v2.py
@@ -406,59 +406,6 @@ def was_applied(output, inpt):
        assert transform.was_applied(output, input)


@pytest.mark.parametrize("p", [0.0, 1.0])
class TestRandomHorizontalFlip:
    def input_expected_image_tensor(self, p, dtype=torch.float32):
        input = torch.tensor([[[0, 1], [0, 1]], [[1, 0], [1, 0]]], dtype=dtype)
        expected = torch.tensor([[[1, 0], [1, 0]], [[0, 1], [0, 1]]], dtype=dtype)

        return input, expected if p == 1 else input

    def test_simple_tensor(self, p):
        input, expected = self.input_expected_image_tensor(p)
        transform = transforms.RandomHorizontalFlip(p=p)

        actual = transform(input)

        assert_equal(expected, actual)

    def test_pil_image(self, p):
        input, expected = self.input_expected_image_tensor(p, dtype=torch.uint8)
        transform = transforms.RandomHorizontalFlip(p=p)

        actual = transform(to_pil_image(input))

        assert_equal(expected, pil_to_tensor(actual))

    def test_datapoints_image(self, p):
        input, expected = self.input_expected_image_tensor(p)
        transform = transforms.RandomHorizontalFlip(p=p)

        actual = transform(datapoints.Image(input))

        assert_equal(datapoints.Image(expected), actual)

    def test_datapoints_mask(self, p):
        input, expected = self.input_expected_image_tensor(p)
        transform = transforms.RandomHorizontalFlip(p=p)

        actual = transform(datapoints.Mask(input))

        assert_equal(datapoints.Mask(expected), actual)

    def test_datapoints_bounding_box(self, p):
        input = datapoints.BoundingBox([0, 0, 5, 5], format=datapoints.BoundingBoxFormat.XYXY, spatial_size=(10, 10))
        transform = transforms.RandomHorizontalFlip(p=p)

        actual = transform(input)

        expected_image_tensor = torch.tensor([5, 0, 10, 5]) if p == 1.0 else input
        expected = datapoints.BoundingBox.wrap_like(input, expected_image_tensor)
        assert_equal(expected, actual)
        assert actual.format == expected.format
        assert actual.spatial_size == expected.spatial_size


@pytest.mark.parametrize("p", [0.0, 1.0])
class TestRandomVerticalFlip:
    def input_expected_image_tensor(self, p, dtype=torch.float32):