{
"_commit_hash": null,
"architectures": [
"Tokenizer"
],
"channels": 1,
"code_dim": 128,
"codebook_size": 2048,
"disc_start": 2001,
"fsq_levels": [
7,
5,
5,
5,
5
],
"image_size": 256,
"init_dim": 64,
"layers": [
"residual",
"residual",
"compress_space",
"residual",
"compress_space",
"residual",
"compress_space",
"residual",
"compress_space",
"residual"
],
"model_type": "pdetokenizer",
"num_codebooks": 1,
"num_groups": 8,
"pad_mode": "circular",
"perceptual_loss_weight": 0,
"quantization_type": "lfq",
"temporal_compression": false,
"torch_dtype": "float32",
"transformers_version": null,
"use_batch_norm": false,
"use_gan": false,
"use_revin": false,
"use_style": false
}