```json
{
  "codebook_size": 512,
  "conv_type": "conv1d",
  "dec_act_func": "GLU",
  "dec_act_kwargs": {
    "dim": 1
  },
  "dim": 128,
  "enc_act_func": "LeakyReLU",
  "enc_act_kwargs": {
    "negative_slope": 0.1
  },
  "first_conv_kernel_size": 5,
  "group": 8,
  "in_channel": 3,
  "l2_recon_loss": true,
  "layer_mults": null,
  "layers": 2,
  "num_res_blocks": 1,
  "out_channel": 3,
  "quantizer": "VectorQuantize",
  "quantizer_kwargs": {
    "codebook_dim": 64,
    "commitment_weight": 0.25,
    "decay": 0.99,
    "heads": 8,
    "kmeans_init": true,
    "use_cosine_sim": true
  }
}
```
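The sketch below shows one way this config might be consumed, assuming the `"quantizer": "VectorQuantize"` entry refers to the `vector_quantize_pytorch` package and that the top-level `dim` and `codebook_size` are passed to the quantizer together with `quantizer_kwargs`; the `config.json` path and the dummy input shape are hypothetical.

```python
import json

import torch
from vector_quantize_pytorch import VectorQuantize

# Load the config shown above ("config.json" is a hypothetical path).
with open("config.json") as f:
    cfg = json.load(f)

# Assumption: "dim" and "codebook_size" feed the quantizer alongside
# "quantizer_kwargs", mirroring the vector_quantize_pytorch constructor.
vq = VectorQuantize(
    dim=cfg["dim"],                      # 128-d latents from the encoder
    codebook_size=cfg["codebook_size"],  # 512 codes
    **cfg["quantizer_kwargs"],           # codebook_dim, heads, decay, ...
)

# Quantize a dummy batch of encoder outputs shaped (batch, sequence, dim).
x = torch.randn(2, 256, cfg["dim"])
quantized, indices, commit_loss = vq(x)
print(quantized.shape, indices.shape, commit_loss)
```

With `heads: 8` and `codebook_dim: 64`, the quantizer projects the 128-d latent into per-head codebook space before the nearest-code lookup, and `use_cosine_sim` switches that lookup to cosine distance; `commitment_weight: 0.25` scales the commitment term added to the reconstruction loss.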