import torch
import torch.nn as nn

from vector_quantize_pytorch import ResidualVQ


class RVQWrapper(nn.Module):
    """Residual vector quantization with learned input/output projections."""

    def __init__(self, dim, num_quantizers, codebook_size):
        super().__init__()
        self.proj_in = nn.Linear(dim, dim, bias=True)
        self.rvq = ResidualVQ(dim=dim, num_quantizers=num_quantizers, codebook_size=codebook_size)
        self.proj_out = nn.Linear(dim, dim, bias=True)
        # Per-quantizer code-usage statistics. The buffer is registered here
        # but never written in this snippet (see the helper sketch below).
        self.register_buffer("ema_counts", torch.zeros(num_quantizers, codebook_size))

    def forward(self, x):
        # x: (batch, seq, dim)
        x = self.proj_in(x)
        y, indices, commit = self.rvq(x)
        y = self.proj_out(y)
        return y, indices, commit
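
The ema_counts buffer above is registered but never updated. Below is a minimal sketch of how it could be maintained, assuming indices has shape (batch, seq, num_quantizers) as in the vector_quantize_pytorch README; update_counts and its decay parameter are hypothetical names introduced here, not part of the library.

@torch.no_grad()
def update_counts(model: RVQWrapper, indices: torch.Tensor, decay: float = 0.99):
    # Hypothetical helper (not in the original snippet or the library):
    # accumulate an exponential moving average of per-code usage.
    # indices: (batch, seq, num_quantizers), as returned by model.forward.
    flat = indices.reshape(-1, indices.shape[-1])  # (batch*seq, num_quantizers)
    counts = torch.zeros_like(model.ema_counts)    # (num_quantizers, codebook_size)
    for q in range(counts.shape[0]):
        counts[q].scatter_add_(
            0,
            flat[:, q],
            torch.ones(flat.shape[0], dtype=counts.dtype, device=counts.device),
        )
    # Blend the fresh counts into the running average in place.
    model.ema_counts.mul_(decay).add_(counts, alpha=1.0 - decay)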
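
A quick usage sketch follows; the shapes assume the upstream README's (batch, seq, dim) convention, and the concrete sizes are illustrative only.

model = RVQWrapper(dim=256, num_quantizers=8, codebook_size=1024)
x = torch.randn(2, 128, 256)   # (batch, seq, dim)
y, indices, commit = model(x)
print(y.shape)                 # torch.Size([2, 128, 256])
print(indices.shape)           # torch.Size([2, 128, 8]) -- one code per quantizer
loss = commit.sum()            # sum per-quantizer commitment losses for training
update_counts(model, indices)  # refresh the usage statistics (helper above)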