Vectorization refactor #205

Merged · 27 commits · Jun 9, 2022
Commits
424c714
Created a wrapper cost function class that combines the aux vars for …
luisenp Jun 1, 2022
ee9e235
Disabled support for optimization variables in cost weights.
luisenp Jun 1, 2022
ea74465
Changed Objective to iterate over CFWrapper if available, and Theseus…
luisenp Jun 1, 2022
1b3af0b
Added a Vectorizer class and moved CFWrappers there.
luisenp Jun 1, 2022
2d3f9d2
Renamed vectorizer as Vectorize, added logic to replace Objective ite…
luisenp Jun 1, 2022
6a146cb
Added a CostFunctionSchema -> List[CostFunction] to use for vectoriza…
luisenp Jun 2, 2022
6c6a887
_CostFunctionWrapper is now meant to just store a cached value coming…
luisenp Jun 2, 2022
77ac280
Added code to automatically compute shared vars in Vectorize.
luisenp Jun 2, 2022
31237da
Changed vectorized costs construction to ensure that their weight is …
luisenp Jun 2, 2022
d30e1af
Implemented main cost function vectorization logic.
luisenp Jun 6, 2022
36e89c7
Fixed bug that was causing detached gradients.
luisenp Jun 6, 2022
376e8ef
Fixed invalid check in theseus end-to-end unit tests.
luisenp Jun 6, 2022
ae6db18
Added unit test for schema and shared var computation.
luisenp Jun 6, 2022
0a2ee0a
Added a test to check that computed vectorized errors are correct.
luisenp Jun 6, 2022
58cee83
Moved vectorization update call to base linearization class.
luisenp Jun 7, 2022
7e60f87
Changed code to allow batch_size > 1 in shared variables.
luisenp Jun 7, 2022
399bb90
Fixed unit test and added call to Objective.update() in update_vector…
luisenp Jun 7, 2022
10cbf1c
Added new private iterator for vectorized costs.
luisenp Jun 7, 2022
10b208a
Replaced _register_vars_in_list with TheseusFunction.register_vars.
luisenp Jun 9, 2022
db5f366
Renamed vectorize_cost_fns kwarg as vectorize.
luisenp Jun 9, 2022
bb83db3
Added license headers.
luisenp Jun 9, 2022
1d0cd20
Small refactor.
luisenp Jun 9, 2022
e902924
Fixed bug that was preventing vectorized costs from working with to(). End…
luisenp Jun 9, 2022
0ec439f
Renamed the private Objective cost function iterator to _get_iterator().
luisenp Jun 9, 2022
aab9ead
Renamed kwarg in register_vars.
luisenp Jun 9, 2022
e57f310
Set vectorize=True for inverse kinematics and backward tests.
luisenp Jun 9, 2022
d6a434f
Removed lingering comments.
luisenp Jun 9, 2022
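
Taken together, the commits describe grouping cost functions that share a structural "schema", evaluating each group's errors in one batched call, and caching the per-cost-function results in a _CostFunctionWrapper. The sketch below is only an illustration of that idea, not theseus code: vectorize_errors, schema_of, batched_error, and the assumed tensor shape are all hypothetical names introduced here.

# Illustrative sketch only, not the theseus implementation: group cost
# functions by a structural "schema" and compute each group's errors with
# one batched call, caching the per-cost-function result.
from collections import defaultdict
from typing import Callable, Dict, Hashable, List

import torch


def vectorize_errors(
    cost_fns: List[object],
    schema_of: Callable[[object], Hashable],  # assumed: maps a cost fn to its schema key
    batched_error: Callable[[List[object]], torch.Tensor],  # assumed: one batched call per group
) -> Dict[object, torch.Tensor]:
    groups: Dict[Hashable, List[object]] = defaultdict(list)
    for cf in cost_fns:
        groups[schema_of(cf)].append(cf)

    cached: Dict[object, torch.Tensor] = {}
    for group in groups.values():
        # One batched evaluation per schema instead of one call per cost function.
        errors = batched_error(group)  # assumed shape: (len(group), batch_size, dim)
        for i, cf in enumerate(group):
            cached[cf] = errors[i]
    return cached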
19 changes: 3 additions & 16 deletions theseus/core/cost_function.py
@@ -4,7 +4,7 @@
 # LICENSE file in the root directory of this source tree.
 
 import abc
-from typing import Any, Iterable, List, Optional, Tuple, cast
+from typing import Any, List, Optional, Tuple, cast
 
 import torch
 import torch.autograd.functional as autogradF
@@ -17,19 +17,6 @@
 from .variable import Variable
 
 
-def _register_vars_in_list(
-    cost_fn: TheseusFunction, var_list_: Iterable[Variable], is_optim: bool = False
-):
-    for var_ in var_list_:
-        if hasattr(cost_fn, var_.name):
-            raise RuntimeError(f"Variable name {var_.name} is not allowed.")
-        setattr(cost_fn, var_.name, var_)
-        if is_optim:
-            cost_fn.register_optim_var(var_.name)
-        else:
-            cost_fn.register_aux_var(var_.name)
-
-
 # A cost function is defined by the variables interacting in it,
 # and a cost weight for weighting errors and jacobians
 # This is an abstract class for cost functions of different types.
@@ -128,8 +115,8 @@ def __init__(
             raise ValueError(
                 "AutodiffCostFunction must receive at least one optimization variable."
             )
-        _register_vars_in_list(self, optim_vars, is_optim=True)
-        _register_vars_in_list(self, aux_vars, is_optim=False)
+        self.register_vars(optim_vars, optim_vars=True)
+        self.register_vars(aux_vars, optim_vars=False)
 
         self._err_fn = err_fn
         self._dim = dim
10 changes: 10 additions & 0 deletions theseus/core/theseus_function.py
@@ -64,6 +64,16 @@ def register_aux_vars(self, aux_var_names: Sequence[str]):
         for name in aux_var_names:
             self.register_aux_var(name)
 
+    def register_vars(self, vars: Iterable[Variable], optim_vars: bool = False):
+        for var_ in vars:
Review comment (Contributor): Nit: trailing underscore.
Reply (Author): Good catch. Changed.
+            if hasattr(self, var_.name):
+                raise RuntimeError(f"Variable name {var_.name} is not allowed.")
+            setattr(self, var_.name, var_)
+            if optim_vars:
+                self.register_optim_var(var_.name)
+            else:
+                self.register_aux_var(var_.name)
+
     # Must copy everything
     @abc.abstractmethod
     def _copy_impl(self, new_name: Optional[str] = None) -> "TheseusFunction":
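
For reference, a hedged usage sketch of the new register_vars helper: the subclass, constructor arguments, and variable names below are hypothetical; only register_vars, register_optim_var, and register_aux_var come from the diff, and the constructor mirrors the pattern used by _CostFunctionWrapper above.

# Hypothetical subclass for illustration; abstract methods (error, dim,
# jacobians, _copy_impl) are omitted.
class MyCostFunction(CostFunction):
    def __init__(self, pose, target, cost_weight, name=None):
        super().__init__(cost_weight, name=name)
        # Each call sets the variables as attributes and records them either as
        # optimization variables (optim_vars=True) or auxiliary variables.
        self.register_vars([pose], optim_vars=True)
        self.register_vars([target], optim_vars=False)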
18 changes: 9 additions & 9 deletions theseus/core/vectorizer.py
@@ -3,7 +3,7 @@
 
 import torch
 
-from .cost_function import CostFunction, _register_vars_in_list
+from .cost_function import CostFunction
 from .objective import Objective
 from .variable import Variable
 
@@ -35,8 +35,8 @@ def __init__(self, cost_fn: CostFunction, name: Optional[str] = None):
             name = f"wrapper({cost_fn.name})"
         super().__init__(cost_fn.weight, name=name)
         self.cost_fn = cost_fn
-        _register_vars_in_list(self, cost_fn.optim_vars, is_optim=True)
-        _register_vars_in_list(self, cost_fn.aux_vars, is_optim=False)
+        self.register_vars(cost_fn.optim_vars, optim_vars=True)
+        self.register_vars(cost_fn.aux_vars, optim_vars=False)
         self._cached_error: Optional[torch.Tensor] = None
         self._cached_jacobians: Optional[List[torch.Tensor]] = None
 
@@ -74,10 +74,6 @@ def __repr__(self) -> str:
 
 # This class replaces the Objective's iterator for one that takes advantage of
 # cost function vectorization
-# TODO:
-# - Tests to add:
-#     + Test that vectorization results in correct costs
-#     + Vectorize variable update after NLOPT step
 class Vectorize:
     _SHARED_TOKEN = "__shared__"
 
@@ -157,8 +153,12 @@ def _get_name(var_idx, name_set_):
         return info
 
     @staticmethod
-    def _get_all_vars(cf) -> List[Variable]:
-        return list(cf.optim_vars) + list(cf.aux_vars) + list(cf.weight.aux_vars)
+    def _get_all_vars(cf: CostFunction) -> List[Variable]:
+        return (
+            list(cf.optim_vars)
+            + list(cf.aux_vars)  # type:ignore
+            + list(cf.weight.aux_vars)  # type:ignore
+        )
 
     @staticmethod
     def _expand(tensor: torch.Tensor, size: int) -> torch.Tensor:
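
The _expand helper at the end of the diff is truncated here; presumably it broadcasts a shared variable's tensor to the vectorized batch size (per the "batch_size > 1 in shared variables" commit). A minimal sketch of that kind of operation, stated as an assumption rather than theseus's actual implementation:

import torch


def expand_batch(tensor: torch.Tensor, size: int) -> torch.Tensor:
    # Sketch: leave tensors that already match the target batch size alone;
    # otherwise broadcast a batch-1 tensor along dim 0 without copying data.
    if tensor.shape[0] == size:
        return tensor
    return tensor.expand(size, *tensor.shape[1:])


# Example: a shared auxiliary variable with batch size 1 expanded to batch 4.
shared = torch.randn(1, 3)
expanded = expand_batch(shared, 4)  # shape (4, 3)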