newbie-geek committed
Commit: 5f69f89
1 Parent(s): ffdef1c

Training in progress, step 50

adapter_config.json CHANGED
@@ -1,6 +1,9 @@
 {
   "alpha_pattern": {},
-  "auto_mapping": null,
+  "auto_mapping": {
+    "base_model_class": "LlamaForCausalLM",
+    "parent_library": "transformers.models.llama.modeling_llama"
+  },
   "base_model_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
   "bias": "none",
   "fan_in_fan_out": false,
@@ -9,8 +12,8 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0.05,
+  "lora_alpha": 32,
+  "lora_dropout": 0.1,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
@@ -22,6 +25,6 @@
     "q_proj",
     "v_proj"
   ],
-  "task_type": "CAUSAL_LM",
+  "task_type": null,
   "use_rslora": false
 }
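
This commit bumps lora_alpha from 16 to 32 and lora_dropout from 0.05 to 0.1, and sets task_type to null; with peft, leaving task_type unset makes it record auto_mapping (here LlamaForCausalLM) instead so the adapter can still be reloaded. A minimal sketch of a peft LoraConfig that would serialize to roughly the new adapter_config.json — not the repo's actual training script, and the rank r is not visible in this diff, so r=8 below is an illustrative assumption:

# Sketch, assuming peft + transformers; matches the new config values.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama-1.1B-Chat-v1.0")
config = LoraConfig(
    r=8,                                  # assumption: rank is not shown in the diff
    lora_alpha=32,                        # was 16 before this commit
    lora_dropout=0.1,                     # was 0.05 before this commit
    target_modules=["q_proj", "v_proj"],  # unchanged
    # task_type deliberately left unset -> serialized as null; peft then
    # stores auto_mapping with the base model class instead.
)
model = get_peft_model(base, config)
model.print_trainable_parameters()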
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e44ce263e6fd885f50d82ca515b9325375b43ee36ededb75acf161ce88bc2e41
-size 48
+oid sha256:b886ecc4079f054a26b612fe5c84f9d13aacad6cf5b744ad887d18d09624612f
+size 4517152
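
The weights file grows from 48 bytes (an effectively empty safetensors payload) to about 4.5 MB, so this is the first checkpoint with real adapter tensors in it. A quick sketch for inspecting them after downloading the file (safe_open is the safetensors API; the local path is assumed):

# Sketch: list the LoRA tensors stored in adapter_model.safetensors.
from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt") as f:  # assumed local path
    for name in f.keys():
        tensor = f.get_tensor(name)
        print(name, tuple(tensor.shape), tensor.dtype)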
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 256,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 256
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 2,
+    "pad_type_id": 0,
+    "pad_token": "</s>"
+  },
   "added_tokens": [
     {
       "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3e19349e9efe69792aa4ddc765a9f1119ebefe6093a0bcc2d5920ada2566daba
+oid sha256:c2002d52c0975e53eb12e90121942d878edec719dd2de4b9472221aaa9a88dee
 size 4792
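
training_args.bin is a torch-pickled transformers.TrainingArguments; the size is unchanged but the hash differs, so some argument values changed between commits. A sketch for inspecting it locally (the attributes printed are standard TrainingArguments fields; the local path is assumed):

# Sketch: load the pickled TrainingArguments and inspect a few fields.
# weights_only=False is required on recent torch, since this is a full
# Python object rather than a tensor state dict.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.max_steps)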