tiendung committed on
Commit
9bf92bc
1 Parent(s): 23b1ba5

Upload folder using huggingface_hub

Files changed (4)
  1. config.json +6 -6
  2. job_new.json +0 -0
  3. measurement.json +0 -0
  4. output.safetensors +2 -2
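The commit message says the files were pushed with huggingface_hub. A minimal sketch of that kind of folder upload, assuming huggingface_hub's upload_folder API; the repo id and local path below are placeholders, not taken from this commit:

```python
# Sketch of an "Upload folder using huggingface_hub" commit.
# folder_path and repo_id are placeholders for illustration only.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="./qwen2-1.5b-reader-quant",   # local dir with config.json, output.safetensors, ...
    repo_id="user/qwen2-1.5b-reader-quant",    # placeholder target repository
    commit_message="Upload folder using huggingface_hub",
)
```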
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "jinaai/qwen2-0.5b-reader",
+  "_name_or_path": "jinaai/qwen2-1.5b-reader",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
@@ -7,14 +7,14 @@
   "bos_token_id": 151643,
   "eos_token_id": 151645,
   "hidden_act": "silu",
-  "hidden_size": 896,
+  "hidden_size": 1536,
   "initializer_range": 0.02,
-  "intermediate_size": 4864,
+  "intermediate_size": 8960,
   "max_position_embeddings": 256000,
-  "max_window_layers": 24,
+  "max_window_layers": 28,
   "model_type": "qwen2",
-  "num_attention_heads": 14,
-  "num_hidden_layers": 24,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
   "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_theta": 2000000,
job_new.json CHANGED
The diff for this file is too large to render. See raw diff
 
measurement.json CHANGED
The diff for this file is too large to render. See raw diff
 
output.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a776f260fcc13dfc69e391a24a4f45f334a0be33eebae76d18409d0632ae552b
-size 742414034
+oid sha256:ad773e10558edc17312c504c222e9cb148fbe19e75b9f6e98c7707baaae997dd
+size 1959624040
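Because output.safetensors is stored with Git LFS, only the pointer (oid and size) appears in the diff. A small sketch for verifying a downloaded copy against the new pointer; the local path is a placeholder, while the expected oid and size come from the diff above:

```python
# Check a downloaded output.safetensors against the LFS pointer in this commit.
import hashlib
import os

path = "output.safetensors"  # placeholder local path
expected_oid = "ad773e10558edc17312c504c222e9cb148fbe19e75b9f6e98c7707baaae997dd"
expected_size = 1959624040

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid
```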