Fix autotest inplace bug, hardsigmoid #7276

Merged · 12 commits · Jan 19, 2022
1 change: 1 addition & 0 deletions docs/source/oneflow.rst
@@ -147,6 +147,7 @@ oneflow
zeros,
zeros_like,
is_nonzero,
is_tensor,
no_grad,
grad_enable,
inference_mode,
1 change: 1 addition & 0 deletions python/oneflow/__init__.py
@@ -296,6 +296,7 @@ def atexit_hook(hook):
adaptive_avg_pool2d,
adaptive_avg_pool3d,
)
from oneflow.nn.modules.is_tensor import is_tensor_op as is_tensor
from oneflow.nn.modules.arange import arange_op as arange
from oneflow.nn.modules.linspace import linspace_op as linspace
from oneflow.nn.modules.argsort import argsort_op as argsort
43 changes: 43 additions & 0 deletions python/oneflow/nn/modules/is_tensor.py
@@ -0,0 +1,43 @@
"""
Copyright 2020 The OneFlow Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import oneflow as flow


def is_tensor_op(obj):
r"""
is_tensor(obj) -> bool

Note that this function simply does ``isinstance(obj, Tensor)``.
Using the ``isinstance`` check directly is better for typechecking
with mypy and more explicit, so it is recommended over ``is_tensor``.

Args:
obj (Object): Object to test

For example:

.. code-block:: python

>>> import oneflow as flow

>>> x = flow.tensor([1, 2, 3])
>>> flow.is_tensor(x)
True

"""
return isinstance(obj, flow.Tensor)
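
As a quick sanity check on the new export (a minimal sketch, not part of the diff; it assumes flow.is_tensor is importable exactly as wired up in python/oneflow/__init__.py above), this shows the behaviour next to the isinstance check the docstring recommends:

import oneflow as flow

x = flow.tensor([1, 2, 3])
print(flow.is_tensor(x))           # True: x is a flow.Tensor
print(isinstance(x, flow.Tensor))  # equivalent check, friendlier to mypy
print(flow.is_tensor([1, 2, 3]))   # False: a plain Python list is not a Tensor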
2 changes: 1 addition & 1 deletion python/oneflow/test/modules/test_activation.py
@@ -256,7 +256,7 @@ def test_hardsigmoid_module_with_random_data(test_case):
y = m(x)
return y

@autotest(check_graph=False)
@autotest(check_graph=True)
def test_functional_hardsigmoid_with_random_data(test_case):
device = random_device()
x = random_pytorch_tensor().to(device)
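For context, the reason check_graph can be switched back on here is the argument copying added to the autotest framework below: if the eager run mutates its input in place, a later graph run on the same tensor would see already-modified values. A minimal sketch of that hazard (the in-place add_ is only an illustration, not the hardsigmoid kernel itself):

import oneflow as flow

x = flow.tensor([1.0, -2.0, 3.0])
graph_input = x.clone().detach()  # what the autotest now hands to nn.Graph

x.add_(1.0)                       # an in-place eager op mutates x ...
print(x)                          # ... so x no longer holds the original values
print(graph_input)                # the detached clone is unaffected
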
@@ -18,6 +18,7 @@
import inspect
import os
import warnings
import copy

import numpy as np
import oneflow as flow
@@ -317,6 +318,19 @@ def dual_method(self, *args, **kwargs):
if name in postulate:
oneflow_res = torch_tensor_to_flow(pytorch_res)
else:
graph_args = []
for arg in oneflow_args:
if flow.is_tensor(arg):
copy_arg = arg.clone().detach()
else:
copy_arg = copy.deepcopy(arg)
graph_args.append(copy_arg)
graph_kwargs = {}
for key, value in oneflow_kwargs.items():
if flow.is_tensor(value):
graph_kwargs[key] = value.clone().detach()
else:
graph_kwargs[key] = copy.deepcopy(value)
oneflow_res = oneflow(*oneflow_args, **oneflow_kwargs)
if testing_graph:
find_check_module_func = True
@@ -336,7 +350,8 @@ def build(self, *args):
if verbose:
print("Run graph of module: ", repr(oneflow))
test_g.debug(3)
test_g_res = test_g(*oneflow_args)
# When testing module methods, kwargs are not considered.
test_g_res = test_g(*graph_args)
Contributor: please add a comment noting that kwargs are not considered when testing modules.

elif oneflow.__name__ in ignore_apis_list:
find_check_module_func = False
# 1. "oneflow.nn.modules" not in oneflow.__module__: For avoid run nn.Module branch graph test, like fold op call Fold Module actually.
@@ -356,9 +371,7 @@ def __init__(self):
super().__init__()

def build(self):
return oneflow(
*oneflow_args, **oneflow_kwargs
)
return oneflow(*graph_args, **graph_kwargs)

try:
# When a tensor already on the CPU is sent `.to` the CPU again inside nn.Graph, a check error will be reported.
@@ -446,6 +459,19 @@ def dual_method(self, *args, **kwargs):
"PyTorch has an error but OneFlow is ok, maybe you should check your implementation to align with PyTorch."
)
raise PyTorchDoesNotSupportError(e)
tensor_graph_args = []
for arg in oneflow_args:
if flow.is_tensor(arg):
copy_arg = arg.clone().detach()
else:
copy_arg = copy.deepcopy(arg)
tensor_graph_args.append(copy_arg)
tensor_graph_kwargs = {}
for key, value in oneflow_kwargs.items():
if flow.is_tensor(value):
tensor_graph_kwargs[key] = value.clone().detach()
else:
tensor_graph_kwargs[key] = copy.deepcopy(value)
oneflow_res = oneflow_method(*oneflow_args, **oneflow_kwargs)
if testing_graph:

@@ -455,7 +481,7 @@ def __init__(self):

def build(self):
return oneflow_method(
*oneflow_args, **oneflow_kwargs
*tensor_graph_args, **tensor_graph_kwargs
)

try:
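
The clone-or-deepcopy pattern above appears twice in this diff (graph_args/graph_kwargs and tensor_graph_args/tensor_graph_kwargs). A possible follow-up, sketched here with a hypothetical helper name, would factor it into one place:

import copy
import oneflow as flow

def _copy_args_for_graph(args, kwargs):
    """Clone tensors and deep-copy everything else, so the nn.Graph run
    cannot observe in-place mutation performed by the eager run."""
    graph_args = [
        a.clone().detach() if flow.is_tensor(a) else copy.deepcopy(a) for a in args
    ]
    graph_kwargs = {
        k: (v.clone().detach() if flow.is_tensor(v) else copy.deepcopy(v))
        for k, v in kwargs.items()
    }
    return graph_args, graph_kwargs

Both call sites could then reduce to graph_args, graph_kwargs = _copy_args_for_graph(oneflow_args, oneflow_kwargs).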