
Commit

address #160
lucidrains committed Sep 16, 2024
1 parent 54d29e8 · commit 492e666
Showing 2 changed files with 5 additions and 2 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "vector-quantize-pytorch"
-version = "1.17.3"
+version = "1.17.4"
 description = "Vector Quantization - Pytorch"
 authors = [
     { name = "Phil Wang", email = "[email protected]" }
5 changes: 4 additions & 1 deletion vector_quantize_pytorch/residual_vq.py
@@ -149,8 +149,11 @@ def __init__(
         self.quantize_dropout_multiple_of = quantize_dropout_multiple_of # encodec paper proposes structured dropout, believe this was set to 4

         # setting up the MLPs for implicit neural codebooks

-        self.mlps = ModuleList([MLP(dim = codebook_dim, l2norm_output = first(self.layers).use_cosine_sim, **mlp_kwargs) for _ in range(num_quantizers - 1)])
+        self.mlps = None
+
+        if implicit_neural_codebook:
+            self.mlps = ModuleList([MLP(dim = codebook_dim, l2norm_output = first(self.layers).use_cosine_sim, **mlp_kwargs) for _ in range(num_quantizers - 1)])

         # sharing codebook logic
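With this change, the MLPs used for implicit neural codebooks are only constructed when the feature is opted into; a plain ResidualVQ leaves self.mlps as None and allocates no MLP parameters. A minimal usage sketch, assuming the constructor arguments below (dim, num_quantizers and codebook_size follow the README; implicit_neural_codebook is the flag read in the diff, and the hyperparameter values are illustrative):

import torch
from vector_quantize_pytorch import ResidualVQ

# default: implicit neural codebooks are off, so self.mlps stays None
# and no MLP parameters are created
residual_vq = ResidualVQ(
    dim = 256,
    num_quantizers = 4,
    codebook_size = 512
)

# opting in builds one MLP per quantizer after the first,
# via the guarded ModuleList shown in the diff above
residual_vq_inc = ResidualVQ(
    dim = 256,
    num_quantizers = 4,
    codebook_size = 512,
    implicit_neural_codebook = True
)

x = torch.randn(1, 1024, 256)
quantized, indices, commit_loss = residual_vq(x)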
