Skip to content

Commit

Permalink
seems to work even better with a one layer mlp
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Nov 11, 2024
1 parent 949b0ba commit 3bb00f5
Showing 1 changed file with 10 additions and 2 deletions.
12 changes: 10 additions & 2 deletions examples/autoencoder_sim_vq.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,10 @@
train_iter = 10000
num_codes = 256
seed = 1234
rotation_trick = True

rotation_trick = True # rotation trick instead of straight-through
use_mlp = True # use a one layer mlp with relu instead of linear

device = "cuda" if torch.cuda.is_available() else "cpu"

def SimVQAutoEncoder(**vq_kwargs):
Expand Down Expand Up @@ -77,7 +80,12 @@ def iterate_dataset(data_loader):

model = SimVQAutoEncoder(
codebook_size = num_codes,
rotation_trick = rotation_trick
rotation_trick = rotation_trick,
codebook_transform = nn.Sequential(
nn.Linear(32, 128),
nn.ReLU(),
nn.Linear(128, 32),
) if use_mlp else None
).to(device)

opt = torch.optim.AdamW(model.parameters(), lr=lr)
Expand Down

0 comments on commit 3bb00f5

Please sign in to comment.