erbacher committed on
Commit
b24f0e9
1 Parent(s): 33e8dee

Upload config

Files changed (1)
  1. config.json +2 -6
config.json CHANGED
@@ -1,11 +1,8 @@
 {
 "_commit_hash": null,
- "architectures": [
- "Tokenizer"
- ],
- "attn_dim_head": 64,
+ "attn_dim_head": 32,
 "channels": 1,
- "codebook_size": 1024,
+ "codebook_size": 512,
 "fsq_levels": [
 7,
 5,
@@ -38,7 +35,6 @@
 "perceptual_loss_weight": 0,
 "quantization_type": "vq",
 "temporal_compression": false,
- "torch_dtype": "float32",
 "transformers_version": null,
 "use_batch_norm": false,
 "use_gan": false,