Initial implicit/truncated backward modes #29

Merged · 14 commits · Jan 19, 2022
add type hints/remove unused track_best_solution
bamos committed Jan 19, 2022
commit 9e61d13ea0c8a3e079d5fea027fc6009c27051b9
theseus/optimizer/nonlinear/nonlinear_optimizer.py: 14 changes (5 additions & 9 deletions)
@@ -167,12 +167,11 @@ def _update_info(
     # loop for the iterative optimizer
     def _optimize_loop(
         self,
-        start_iter,
-        num_iter,
-        info,
-        track_best_solution,
-        verbose,
-        truncated_grad_loop,
+        start_iter: int,
+        num_iter: int,
+        info: NonlinearOptimizerInfo,
+        verbose: bool,
+        truncated_grad_loop: bool,
         **kwargs,
     ):
         converged_indices = torch.zeros_like(info.last_err).bool()
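
For context, the one body line visible in this hunk initializes a per-batch convergence mask from info.last_err. Below is a minimal runnable sketch of that pattern, not theseus code: last_err and the tolerance check are assumed stand-ins, and the real convergence criterion lives outside this diff.

    import torch

    # last_err stands in for info.last_err: one error value per batch entry.
    last_err = torch.tensor([0.9, 0.01, 0.5, 0.001])

    # All entries start unconverged; the mask matches last_err's shape/device.
    converged_indices = torch.zeros_like(last_err).bool()

    # A hypothetical per-entry convergence test.
    err_tolerance = 0.05
    converged_indices |= last_err < err_tolerance
    print(converged_indices)  # tensor([False,  True, False,  True])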
@@ -256,7 +255,6 @@ def _optimize_impl(
                 start_iter=0,
                 num_iter=self.params.max_iterations,
                 info=info,
-                track_best_solution=track_best_solution,
                 verbose=verbose,
                 truncated_grad_loop=False,
                 **kwargs,
@@ -277,7 +275,6 @@ def _optimize_impl(
                 start_iter=0,
                 num_iter=num_no_grad_iter,
                 info=info,
-                track_best_solution=track_best_solution,
                 verbose=verbose,
                 truncated_grad_loop=False,
                 **kwargs,
@@ -288,7 +285,6 @@ def _optimize_impl(
                 start_iter=0,
                 num_iter=backward_num_iterations,
                 info=grad_loop_info,
-                track_best_solution=False,
                 verbose=verbose,
                 truncated_grad_loop=True,
                 **kwargs,
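
With track_best_solution gone from _optimize_loop, every call site now passes only the remaining keyword arguments, as the three hunks above show. A minimal self-contained sketch of that keyword-only call pattern, using a hypothetical stand-in class rather than the actual theseus API (a plain dict stands in for NonlinearOptimizerInfo):

    from typing import Any, Dict

    class LoopSketch:
        # Hypothetical stand-in; the signature mirrors the type-hinted
        # _optimize_loop from this commit.
        def _optimize_loop(
            self,
            start_iter: int,
            num_iter: int,
            info: Dict[str, Any],
            verbose: bool,
            truncated_grad_loop: bool,
            **kwargs,
        ) -> None:
            for it in range(start_iter, start_iter + num_iter):
                if verbose:
                    print(f"iter {it}, truncated_grad_loop={truncated_grad_loop}")

    # Call pattern matching the updated call sites: no track_best_solution.
    LoopSketch()._optimize_loop(
        start_iter=0,
        num_iter=2,
        info={},
        verbose=True,
        truncated_grad_loop=False,
    )

Note that because the signature accepts **kwargs, a stale caller still passing track_best_solution= would be silently absorbed rather than raise a TypeError, which is why this commit removes the argument at each call site rather than relying on an error to surface it.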