ability to make the entropy aux loss positive in lfq with a flag
lucidrains committed Jun 19, 2024
1 parent ea3b16d commit fc55a8c
Showing 2 changed files with 14 additions and 2 deletions.
pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
 [project]
 name = "vector-quantize-pytorch"
-version = "1.14.28"
+version = "1.14.29"
 description = "Vector Quantization - Pytorch"
 authors = [
     { name = "Phil Wang", email = "[email protected]" }
vector_quantize_pytorch/lookup_free_quantization.py (14 changes: 13 additions & 1 deletion)
@@ -88,7 +88,9 @@ def __init__(
         soft_clamp_input_value = None,
         cosine_sim_project_in = False,
         cosine_sim_project_in_scale = None,
-        channel_first = None
+        channel_first = None,
+        experimental_softplus_entropy_loss = False,
+        entropy_loss_offset = 5.,                   # how much to shift the loss before softplus
     ):
         super().__init__()

@@ -154,6 +156,11 @@ def __init__(
         self.soft_clamp_input_value = soft_clamp_input_value
         assert not exists(soft_clamp_input_value) or soft_clamp_input_value >= codebook_scale

+        # whether to make the entropy loss positive through a softplus (experimental, please report if this worked or not in discussions)
+
+        self.entropy_loss_offset = entropy_loss_offset
+        self.experimental_softplus_entropy_loss = experimental_softplus_entropy_loss
+
         # for no auxiliary loss, during inference

         self.register_buffer('mask', 2 ** torch.arange(codebook_dim - 1, -1, -1))
@@ -308,6 +315,11 @@ def forward(
             # if not training, just return dummy 0
             entropy_aux_loss = per_sample_entropy = codebook_entropy = self.zero

+        # whether to make the entropy loss positive or not through a (shifted) softplus
+
+        if self.training and self.experimental_softplus_entropy_loss:
+            entropy_aux_loss = F.softplus(entropy_aux_loss + self.entropy_loss_offset)
+
         # commit loss

         if self.training and self.commitment_loss_weight > 0.:

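Below is a minimal usage sketch of the new flag. It is not part of this commit and assumes the LFQ class and its codebook_size / dim arguments behave as shown in the repository README. With experimental_softplus_entropy_loss = True, the entropy auxiliary term is passed through a shifted softplus during training, softplus(x + entropy_loss_offset) = log(1 + exp(x + entropy_loss_offset)), which is strictly positive for any x.

# hypothetical usage sketch, not part of this commit -- assumes LFQ is importable
# from the package root and takes codebook_size / dim as shown in the README

import torch
from vector_quantize_pytorch import LFQ

quantizer = LFQ(
    codebook_size = 65536,                       # must be a power of 2
    dim = 16,                                    # log2(65536) = 16
    experimental_softplus_entropy_loss = True,   # new flag from this commit
    entropy_loss_offset = 5.                     # shift applied before the softplus
)

image_feats = torch.randn(1, 16, 32, 32)         # channel-first features, as in the README

quantized, indices, entropy_aux_loss = quantizer(image_feats)

# in training mode the entropy term is softplus(raw_loss + 5.), hence positive;
# in eval mode the auxiliary loss is still the dummy zero

The offset only shifts where the softplus saturates: a larger entropy_loss_offset keeps the transformed loss closer to linear in the raw value, while the flag itself guarantees the term cannot go negative.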