Skip to content

Commit

Permalink
Dev add jacobian hessian test (#10428)
Browse files Browse the repository at this point in the history
Add test code for the jacobian and hessian interfaces.
  • Loading branch information
lihuizhao committed Feb 2, 2024
1 parent 8250384 commit 206a195
Showing 1 changed file with 40 additions and 21 deletions.
61 changes: 40 additions & 21 deletions python/oneflow/test/misc/test_autograd_functional.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
from packaging import version
import unittest
import oneflow as flow
import oneflow.unittest
Expand Down Expand Up @@ -110,44 +111,62 @@ def test_hvp(test_case):
)
result_tensors = torch.autograd.functional.hvp(_func_multi_scalar, inputs, v)

# NOTE: `torch.autograd.functional.jacobian` gained its `strategy` parameter
# and `hessian` its `outer_jacobian_strategy` parameter in PyTorch 1.11.0,
# so each call is gated on the installed PyTorch version.
@autotest(n=1, check_graph=False)
def test_jacobian(test_case):
    """Compare oneflow and PyTorch `autograd.functional.jacobian` results.

    Exercises both a single-tensor input and a tuple-of-tensors input;
    the `@autotest` decorator performs the cross-framework comparison.
    """
    # Single-tensor input.
    inputs = random_tensor(ndim=2, dim0=5, dim1=5)
    if version.parse(torch.pytorch.__version__) < version.parse("1.11.0"):
        # Older PyTorch: the `strategy` keyword does not exist yet.
        result_tensor = torch.autograd.functional.jacobian(
            _func_tensor, inputs, vectorize=False
        )
    else:
        result_tensor = torch.autograd.functional.jacobian(
            _func_tensor, inputs, vectorize=False, strategy="reverse-mode"
        )

    # Tuple-of-tensors input.
    inputs = (
        random_tensor(ndim=2, dim0=5, dim1=5),
        random_tensor(ndim=2, dim0=5, dim1=5),
    )
    if version.parse(torch.pytorch.__version__) < version.parse("1.11.0"):
        result_tensors = torch.autograd.functional.jacobian(
            _func_multi_scalar, inputs, vectorize=False
        )
    else:
        result_tensors = torch.autograd.functional.jacobian(
            _func_multi_scalar, inputs, vectorize=False, strategy="reverse-mode"
        )

@autotest(n=1, check_graph=False)
def test_hessian(test_case):
    """Compare oneflow and PyTorch `autograd.functional.hessian` results.

    Exercises both a single-tensor input and a tuple-of-tensors input;
    the `@autotest` decorator performs the cross-framework comparison.
    """
    # Single-tensor input.
    inputs = random_tensor(ndim=2, dim0=5, dim1=5)
    if version.parse(torch.pytorch.__version__) < version.parse("1.11.0"):
        # Older PyTorch: the `outer_jacobian_strategy` keyword does not exist yet.
        result_tensor = torch.autograd.functional.hessian(
            _func_scalar, inputs, vectorize=False,
        )
    else:
        result_tensor = torch.autograd.functional.hessian(
            _func_scalar,
            inputs,
            vectorize=False,
            outer_jacobian_strategy="reverse-mode",
        )

    # Tuple-of-tensors input.
    inputs = (
        random_tensor(ndim=2, dim0=5, dim1=5),
        random_tensor(ndim=2, dim0=5, dim1=5),
    )
    if version.parse(torch.pytorch.__version__) < version.parse("1.11.0"):
        result_tensors = torch.autograd.functional.hessian(
            _func_multi_scalar, inputs, vectorize=False,
        )
    else:
        result_tensors = torch.autograd.functional.hessian(
            _func_multi_scalar,
            inputs,
            vectorize=False,
            outer_jacobian_strategy="reverse-mode",
        )


if __name__ == "__main__":
Expand Down

0 comments on commit 206a195

Please sign in to comment.