minor bug fix in VectorFunction
rfeinman committed May 18, 2022
1 parent: 78be8f3 · commit: b49ca3e
Showing 1 changed file with 3 additions and 3 deletions.
torchmin/function.py (3 additions, 3 deletions)
@@ -172,8 +172,8 @@ def closure(self, x):
                 jacp = JacobianLinearOperator(x, f)
             if self._jac:
                 if self._I is None:
-                    self._I = torch.eye(x.numel(), dtype=x.dtype, device=x.device)
-                jvp = lambda v: autograd.grad(f, x, v, retain_graph=True)[0]
-                jac = _vmap(jvp)(self._I)
+                    self._I = torch.eye(f.numel(), dtype=x.dtype, device=x.device)
+                vjp = lambda v: autograd.grad(f, x, v, retain_graph=True)[0]
+                jac = _vmap(vjp)(self._I)
 
         return vf_value(f=f.detach(), jacp=jacp, jac=jac)
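
For context on the fix: autograd.grad(f, x, v) computes a vector-Jacobian product, so each probe vector v must match the shape of the output f, not the input x. Mapping such VJPs over the rows of an m×m identity (m = f.numel()) recovers the full m×n Jacobian, which is why the identity is now sized by f.numel() and the lambda is renamed from jvp to vjp. Below is a minimal, self-contained sketch, not the torchmin source: it uses a hypothetical toy function and a plain Python loop instead of _vmap, purely to illustrate why the old x.numel() sizing breaks whenever the output and input dimensions differ.

```python
import torch

# Toy vector function with output dim m=2 != input dim n=3 (illustrative only).
x = torch.randn(3, requires_grad=True)
f = torch.stack([x[0] * x[1], x.sum()])

# Build the Jacobian row-by-row via vector-Jacobian products.
# Each probe vector v must be shaped like f, hence eye(f.numel()), not eye(x.numel()).
I = torch.eye(f.numel(), dtype=x.dtype, device=x.device)
rows = [torch.autograd.grad(f, x, v, retain_graph=True)[0] for v in I]
jac = torch.stack(rows)  # shape (m, n) = (2, 3)

# Sanity check against autograd's reference Jacobian.
ref = torch.autograd.functional.jacobian(
    lambda t: torch.stack([t[0] * t[1], t.sum()]), x.detach())
assert torch.allclose(jac, ref)
```

With the old x.numel() sizing, the probe vectors would have the input's length and autograd.grad would raise a shape mismatch whenever m != n; the bug only went unnoticed for square Jacobians.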
