From 43a77a6bf2a8ae735cfcbde2fb387f6b7e1c5695 Mon Sep 17 00:00:00 2001
From: Ricardo Vieira
Date: Wed, 26 Jun 2024 21:58:35 +0200
Subject: [PATCH] Add and test `doctest-modules`

---
 pyproject.toml       | 2 +-
 pytensor/gradient.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 6ce096d7b2..5b01213442 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -116,7 +116,7 @@ versionfile_build = "pytensor/_version.py"
 tag_prefix = "rel-"
 
 [tool.pytest]
-addopts = "--durations=50"
+addopts = "--durations=50 --doctest-modules"
 testpaths = "tests/"
 
 [tool.ruff]
diff --git a/pytensor/gradient.py b/pytensor/gradient.py
index 11a00a4757..faf52bf745 100644
--- a/pytensor/gradient.py
+++ b/pytensor/gradient.py
@@ -2086,13 +2086,13 @@ def hessian_vector_product(cost, wrt, p, **grad_kwargs):
 
         from pytensor import function
         from pytensor.tensor import vector
-        from pytensor.gradient import jacobian, hessian_vector_product
+        from pytensor.gradient import grad, hessian_vector_product
 
         x = vector('x')
         p = vector('p')
         rosen = (100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
 
-        rosen_jac = jacobian(rosen, x)
+        rosen_jac = grad(rosen, x)
         rosen_hessp = hessian_vector_product(rosen, x, p)
 
         rosen_fn = function([x], rosen)