@@ -317,32 +317,6 @@ static PyObject * python_exit_dual_level(PyObject* _unused, PyObject* args, PyOb
   END_HANDLE_TH_ERRORS
 }
 
-static PyObject * python_make_dual(PyObject* _unused, PyObject* args, PyObject* kwargs) {
-  HANDLE_TH_ERRORS
-  static PythonArgParser parser({
-    "make_dual(Tensor tensor, Tensor tangent, *, int64_t level)"
-  });
-
-  ParsedArgs<3> parsed_args;
-  auto _r = parser.parse(args, kwargs, parsed_args);
-
-  return utils::wrap(forward_ad::make_dual(_r.tensor(0), _r.tensor(1), _r.toInt64(2)));
-  END_HANDLE_TH_ERRORS
-}
-
-static PyObject * python_unpack_dual(PyObject* _unused, PyObject* args, PyObject* kwargs) {
-  HANDLE_TH_ERRORS
-  static PythonArgParser parser({
-    "unpack_dual(Tensor tensor, *, int64_t level)"
-  });
-
-  ParsedArgs<2> parsed_args;
-  auto _r = parser.parse(args, kwargs, parsed_args);
-
-  return utils::wrap(forward_ad::unpack_dual(_r.tensor(0), _r.toInt64(1)));
-  END_HANDLE_TH_ERRORS
-}
-
 // autograd methods on torch._C
 static PyMethodDef methods[] = { // NOLINT
   {"_set_grad_enabled", set_grad_enabled, METH_O, nullptr},
@@ -356,8 +330,6 @@ static PyMethodDef methods[] = { // NOLINT
356330 {" autocast_decrement_nesting" , autocast_decrement_nesting, METH_NOARGS, nullptr },
357331 {" set_anomaly_enabled" , set_anomaly_mode_enabled, METH_O, nullptr },
358332 {" is_anomaly_enabled" , is_anomaly_mode_enabled, METH_NOARGS, nullptr },
359- {" _make_dual" , castPyCFunctionWithKeywords (python_make_dual), METH_VARARGS | METH_KEYWORDS, nullptr },
360- {" _unpack_dual" , castPyCFunctionWithKeywords (python_unpack_dual), METH_VARARGS | METH_KEYWORDS, nullptr },
361333 {" _enter_dual_level" , python_enter_dual_level, METH_NOARGS, nullptr },
362334 {" _exit_dual_level" , castPyCFunctionWithKeywords (python_exit_dual_level), METH_VARARGS | METH_KEYWORDS, nullptr },
363335 {nullptr , nullptr , 0 , nullptr }
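
For context on what these bindings back: below is a minimal Python sketch of the forward-mode AD workflow, assuming the public `torch.autograd.forward_ad` entry points (`dual_level`, `make_dual`, `unpack_dual`) that sit on top of the dual-level machinery registered in this method table. It is an illustrative usage example only, not part of this diff.

```python
# Minimal sketch (assumption: the public torch.autograd.forward_ad API, which
# layers on top of the _enter_dual_level/_exit_dual_level bindings kept above).
import torch
import torch.autograd.forward_ad as fwAD

primal = torch.randn(3)
tangent = torch.randn(3)

with fwAD.dual_level():                     # enter (and later exit) a dual level
    dual = fwAD.make_dual(primal, tangent)  # attach the tangent to the primal
    out = dual.sin()
    y, jvp = fwAD.unpack_dual(out)          # primal output and its JVP

# Here jvp equals primal.cos() * tangent (the directional derivative of sin).
```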