Skip to content

Commit

Permalink
address #145 again
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Jul 3, 2024
1 parent 1bce1c3 commit 3505761
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "vector-quantize-pytorch"
version = "1.14.45"
version = "1.14.46"
description = "Vector Quantization - Pytorch"
authors = [
{ name = "Phil Wang", email = "[email protected]" }
Expand Down
11 changes: 9 additions & 2 deletions vector_quantize_pytorch/lookup_free_quantization.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def __init__(
bits = ((all_codes[..., None].int() & self.mask) != 0).float()
codebook = self.bits_to_codes(bits)

self.register_buffer('codebook', codebook, persistent = False)
self.register_buffer('codebook', codebook.float(), persistent = False)

def bits_to_codes(self, bits):
    """Affinely map {0, 1} bit values onto symmetric codes {-scale, +scale}.

    A bit of 1 maps to +codebook_scale, a bit of 0 to -codebook_scale;
    fractional inputs are interpolated linearly (0.5 -> 0).
    """
    scale = self.codebook_scale
    return bits * scale * 2 - scale
Expand Down Expand Up @@ -257,6 +257,7 @@ def forward(
c - number of codebook dim
"""

orig_dtype = x.dtype
x = x.float()

is_img_or_video = x.ndim >= 4
Expand Down Expand Up @@ -313,7 +314,7 @@ def forward(
# entropy aux loss

if self.training:
codebook = self.codebook
codebook = self.codebook.float()

codebook = self.maybe_l2norm(codebook)

Expand Down Expand Up @@ -403,6 +404,12 @@ def forward(

aux_loss = entropy_aux_loss * self.entropy_loss_weight + commit_loss * self.commitment_loss_weight

# restore original dtype

x = x.type(orig_dtype)

# returns

ret = Return(x, indices, aux_loss)

if not return_loss_breakdown:
Expand Down

0 comments on commit 3505761

Please sign in to comment.