{
  "pretrained_model_name_or_path": "E:/stable-diffusion-webui_db_22-11-22/stable-diffusion-webui_/models/Stable-diffusion/final-pruned.ckpt",
  "v2": false,
  "v_parameterization": false,
  "logging_dir": "",
  "train_data_dir": "C:\\\u56fe\u50cf\u5904\u7406\\Lora\\shenli-l",
  "reg_data_dir": "",
  "output_dir": "C:\\Users\\Administrator\\Desktop\\Lora-select\\lora\\\u795e\u91ccv3",
  "max_resolution": "768,768",
  "learning_rate": "1.5e-5",
  "lr_scheduler": "constant_with_warmup",
  "lr_warmup": "5",
  "train_batch_size": 3,
  "epoch": "10",
  "save_every_n_epochs": "1",
  "mixed_precision": "fp16",
  "save_precision": "fp16",
  "seed": "10086",
  "num_cpu_threads_per_process": 8,
  "cache_latents": true,
  "caption_extension": "",
  "enable_bucket": true,
  "gradient_checkpointing": false,
  "full_fp16": false,
  "no_token_padding": false,
  "stop_text_encoder_training": 0,
  "use_8bit_adam": true,
  "xformers": true,
  "save_model_as": "safetensors",
  "shuffle_caption": true,
  "save_state": false,
  "resume": "",
  "prior_loss_weight": 1.0,
  "text_encoder_lr": "1.5e-5",
  "unet_lr": "1.5e-4",
  "network_dim": 128,
  "lora_network_weights": "",
  "color_aug": false,
  "flip_aug": false,
  "clip_skip": 2,
  "gradient_accumulation_steps": 1,
  "mem_eff_attn": false,
  "output_name": "(v3)shenli_shenli-new_shenli-o_shenli-ss_shenli-br_shenli-jk",
  "model_list": "",
  "max_token_length": "75",
  "max_train_epochs": "",
  "max_data_loader_n_workers": ""
}