prabhuat committed
Commit b374812 · verified · 1 Parent(s): 239c0f9

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,56 +1,16 @@
  ---
- license: apache-2.0
- datasets:
- - CycloneDX/cdx-docs
+ base_model: unsloth/Qwen2.5-Coder-14B-Instruct
  language:
  - en
- base_model:
- - unsloth/Qwen2.5-Coder-14B-Instruct
  library_name: mlx
+ license: apache-2.0
  tags:
- - cyclonedx
- - cdxgen
- - sbom
- - security
- - purl
- - obom
- - ml-bom
- - cbom
+ - unsloth
+ - transformers
+ - code
+ - qwen
+ - qwen-coder
+ - codeqwen
+ - mlx
+ pipeline_tag: text-generation
  ---
-
- **PREVIEW RELEASE**
-
- ## Testing with LM Studio
-
- Use [LM Studio](https://lmstudio.ai/docs/basics/download-model) to download and test this model. Search for `CycloneDX/cdx1-mlx` (Full version) or `CycloneDX/cdx1-mlx-8bit` (Recommended).
-
- Use the below configurations:
-
- ```
- System Prompt: `You are a helpful assistant to the user.` Use Prompt [Template](https://lmstudio.ai/docs/advanced/prompt-template).
- Temperature: 0.05
- Max tokens: 16000 or 32000
- Context length: 16000
- ```
-
- ## Testing with mlx
-
- Install [miniconda](https://docs.anaconda.com/miniconda/install/) or Python 3.12
-
- ```
- conda create --name cdx1-mlx python=3.12
- conda activate cdx1-mlx
- pip install mlx
- ```
-
- LLM Inference from the CLI.
-
- ```shell
- mlx_lm.generate --model CycloneDX/cdx1-mlx --system-prompt "You are a helpful assistant to the user." --prompt "tell me about cdxgen" --temp 0.05
- ```
-
- Use the 8 bit version for better speed and performance.
-
- ```shell
- mlx_lm.generate --model CycloneDX/cdx1-mlx-8bit --system-prompt "You are a helpful assistant to the user." --prompt "tell me about cdxgen" --temp 0.05
- ```
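
The updated card keeps only the metadata block; the usage walkthrough above was dropped. For reference, a minimal sketch of the same inference flow through the mlx-lm Python API (note the `mlx_lm` module ships with the `mlx-lm` package rather than bare `mlx`; the repo id is taken from the removed README):

```python
# Minimal sketch, assuming `pip install mlx-lm` (the mlx_lm.generate CLI used
# above comes from the mlx-lm package, not from `pip install mlx` alone).
from mlx_lm import load, generate

# Repo id from the removed README; substitute your own upload as needed.
model, tokenizer = load("CycloneDX/cdx1-mlx")

prompt = tokenizer.apply_chat_template(
    [
        {"role": "system", "content": "You are a helpful assistant to the user."},
        {"role": "user", "content": "tell me about cdxgen"},
    ],
    add_generation_prompt=True,
    tokenize=False,
)
print(generate(model, tokenizer, prompt=prompt, max_tokens=512))
```
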
added_tokens.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|PAD_TOKEN|>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
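
These are Qwen's special tokens (ChatML markers, tool-call tags, FIM markers, and multimodal placeholders), plus the `<|PAD_TOKEN|>` appended at id 151665. A small sketch to confirm the ids against the tokenizer shipped alongside this file (assumes a local checkout of the repo):

```python
# Sketch: check that the ids in added_tokens.json match the tokenizer shipped
# alongside it (run from a checkout of this repo; the "." path is an assumption).
import json
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
added = json.load(open("added_tokens.json"))
for token, expected_id in added.items():
    assert tok.convert_tokens_to_ids(token) == expected_id, token
print(f"all {len(added)} added tokens resolve to the expected ids")
```
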
chat_template.jinja ADDED
@@ -0,0 +1,54 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- messages[0]['content'] }}
+ {%- else %}
+ {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+ {%- endif %}
+ {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+ {%- else %}
+ {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+ {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role }}
+ {%- if message.content %}
+ {{- '\n' + message.content }}
+ {%- endif %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '\n<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- message.content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
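
The template is Qwen's ChatML-style format: every turn is wrapped in `<|im_start|>role ... <|im_end|>`, tool schemas are injected into the system turn, and tool calls/responses are serialized inside `<tool_call>`/`<tool_response>` tags. A sketch rendering it directly with jinja2 (recent transformers releases also pick this file up through `tokenizer.apply_chat_template`):

```python
# Sketch: render chat_template.jinja with plain jinja2 to see the ChatML
# layout it produces (run from a checkout of this repo).
from jinja2 import Template

template = Template(open("chat_template.jinja").read())
text = template.render(
    messages=[
        {"role": "system", "content": "You are a helpful assistant to the user."},
        {"role": "user", "content": "tell me about cdxgen"},
    ],
    add_generation_prompt=True,
)
print(text)
# Expected shape, per the template above:
# <|im_start|>system
# You are a helpful assistant to the user.<|im_end|>
# <|im_start|>user
# tell me about cdxgen<|im_end|>
# <|im_start|>assistant
```
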
config.json CHANGED
@@ -1,31 +1,29 @@
  {
  "architectures": [
- "LlamaForCausalLM"
+ "Qwen2ForCausalLM"
  ],
- "attention_bias": false,
  "attention_dropout": 0.0,
- "bos_token_id": 100257,
- "eos_token_id": 100265,
- "head_dim": 128,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
- "intermediate_size": 17920,
- "max_position_embeddings": 16384,
- "mlp_bias": false,
- "model_type": "llama",
+ "intermediate_size": 13824,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
  "num_attention_heads": 40,
- "num_hidden_layers": 40,
- "num_key_value_heads": 10,
- "original_max_position_embeddings": 16384,
- "pad_token_id": 100351,
- "pretraining_tp": 1,
- "rms_norm_eps": 1e-05,
- "rope_scaling": null,
- "rope_theta": 250000,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "pad_token_id": 151665,
+ "rms_norm_eps": 1e-06,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.47.1",
+ "transformers_version": "4.44.2",
+ "unsloth_fixed": true,
  "use_cache": true,
- "vocab_size": 100352
+ "use_sliding_window": false,
+ "vocab_size": 152064
  }
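
The config swap is a different architecture, not just a rename: Qwen2 with 48 layers (was 40), 8 KV heads (was 10), a 13824-wide MLP (was 17920), RoPE theta 1e6, 32k context, and a 152064-token vocabulary. A quick arithmetic check that the new numbers hang together, landing near the parameter count reported in model.safetensors.index.json below:

```python
# Sketch: sanity-check the new Qwen2 config values against each other.
hidden_size = 5120
num_attention_heads = 40
num_key_value_heads = 8
num_hidden_layers = 48
intermediate_size = 13824
vocab_size = 152064

head_dim = hidden_size // num_attention_heads            # 128, matching the old config's explicit head_dim
gqa_groups = num_attention_heads // num_key_value_heads  # 5 query heads share each KV head

# Rough weight count for the transformer stack (q/k/v biases and layernorms
# add a comparatively tiny amount on top):
attn = (hidden_size * hidden_size * 2                        # q_proj + o_proj
        + hidden_size * num_key_value_heads * head_dim * 2)  # k_proj + v_proj
mlp = hidden_size * intermediate_size * 3                    # gate, up, down
embed = vocab_size * hidden_size * 2                         # embeddings + untied lm_head
total = num_hidden_layers * (attn + mlp) + embed
print(f"head_dim={head_dim}, gqa_groups={gqa_groups}, ~{total / 1e9:.2f}B params")
# ~14.77B, close to the 14,770,033,664 reported in the index below.
```
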
generation_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "max_length": 32768,
+ "pad_token_id": 151665,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.44.2"
+ }
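
These are stock Qwen2.5 sampling defaults. A sketch of the equivalent explicit configuration in transformers (when this file is present in a repo, `GenerationConfig.from_pretrained` loads it automatically; the token-id comments follow added_tokens.json above):

```python
# Sketch: the generation defaults above, constructed manually.
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    max_length=32768,
    eos_token_id=[151645, 151643],  # <|im_end|>, <|endoftext|>
    pad_token_id=151665,            # <|PAD_TOKEN|>
)
print(gen_cfg)
```
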
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:885edaf6ff1f34851691ce41248813760645b9720a4f347cb06017e7774191de
- size 5248252582
+ oid sha256:78ce05411a483549f826255aad228743f26f6f980f3092f97658ea3efea6f0fd
+ size 5269326976
model-00002-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:18d497f7cfea1939ae869e5db3e6c9c5ee0464b6c5b8107e5adf42618b7d6ed9
- size 5321694956
+ oid sha256:8c57e446239a0b0300b6ac48fb5ed1b4f27887bf23bb9500c9225c1db00957d9
+ size 5363828073
model-00003-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:543d2532beeac4b1d20fd094778306a3f756033244325322306d308246769f9d
- size 5269245825
+ oid sha256:87a16207f90e2fe13a5346aa7c0018dc2a692372731c8453b1e40438956118d1
+ size 5363828102
model-00004-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:952b38c08240be4109358bb1f529c46eec7cc7eb5ca4c99cbbe3dd009250ee3e
- size 5269266538
+ oid sha256:cbc8790bb8a6ac2780c22b2161533a7adcf9141190757b6a55178f8655167957
+ size 5237963142
model-00005-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d34a75a6526b898527a5ac22d4a5a63243c841f5ab90d1fb6f87a3eaa468be6f
- size 5269266536
+ oid sha256:b90ffe283cbbae545160eb4843d7d5e06156f93dc913499c71f258ee994c2e73
+ size 5363828102
model-00006-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:322e14ab9528ecc814cf162103b9f12ae0cda663f461c87f2b17a5e0a3957af2
- size 2941330259
+ oid sha256:d61ef902a921a72e37beb8f7759aec03d36700a6b1ee9f47c01438386cad3016
+ size 2941359481
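
All six shards were rewritten. As a sanity check, the new sizes add up to the bfloat16 parameter count reported in the index below, plus a small remainder consistent with the per-file safetensors headers:

```python
# Sketch: the new shard sizes from the LFS pointers above.
shard_sizes = [
    5269326976, 5363828073, 5363828102,
    5237963142, 5363828102, 2941359481,
]
total_file_bytes = sum(shard_sizes)    # 29,540,133,876

# The index below reports total_size 29540067328 and 14770033664 parameters;
# at 2 bytes per bfloat16 parameter that is exactly the tensor-data size.
tensor_bytes = 14770033664 * 2         # 29,540,067,328
print(total_file_bytes - tensor_bytes) # 66,548 bytes left for the headers
```
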
model.safetensors.index.json CHANGED
@@ -1,6 +1,7 @@
  {
  "metadata": {
- "total_size": 29319014400
  },
  "weight_map": {
  "lm_head.weight": "model-00006-of-00006.safetensors",
@@ -10,360 +11,576 @@
  "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
- "model.layers.14.input_layernorm.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.14.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
- "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.21.input_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.21.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
- "model.layers.22.input_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.22.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
- "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.29.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.29.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
- "model.layers.29.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
  "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
- "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.37.input_layernorm.weight": "model-00006-of-00006.safetensors",
- "model.layers.37.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.37.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.37.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
  "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
  "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
- "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.38.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
- "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
  "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
- "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
- "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.norm.weight": "model-00006-of-00006.safetensors"
  }
 
  {
  "metadata": {
+ "total_size": 29540067328,
+ "total_parameters": 14770033664
  },
  "weight_map": {
  "lm_head.weight": "model-00006-of-00006.safetensors",
 
  "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
  "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
  "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
  "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
  "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
  "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
  "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
  "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
243
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
244
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
245
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
246
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
247
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
248
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
249
  "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
250
  "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
251
  "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
252
  "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
253
  "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
254
+ "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
255
  "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
256
  "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
257
+ "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
258
  "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
259
+ "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
260
  "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
261
  "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
262
  "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
263
  "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
264
  "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
265
  "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
266
+ "model.layers.28.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
267
  "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
268
  "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
269
+ "model.layers.28.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
270
  "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
271
+ "model.layers.28.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
272
  "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
273
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
274
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
275
  "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
276
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
277
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
278
+ "model.layers.29.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
279
  "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
280
  "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
281
+ "model.layers.29.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
282
  "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
283
+ "model.layers.29.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
284
  "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
285
  "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
286
  "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
287
  "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
288
  "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
289
  "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
290
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
291
  "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
292
  "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
293
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
294
  "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
295
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
296
  "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
297
+ "model.layers.30.input_layernorm.weight": "model-00004-of-00006.safetensors",
298
+ "model.layers.30.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
299
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
300
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
301
+ "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
302
+ "model.layers.30.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
303
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
304
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
305
+ "model.layers.30.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
306
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
307
+ "model.layers.30.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
308
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
309
+ "model.layers.31.input_layernorm.weight": "model-00004-of-00006.safetensors",
310
+ "model.layers.31.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
311
+ "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
312
+ "model.layers.31.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
313
+ "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
314
+ "model.layers.31.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
315
+ "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
316
+ "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
317
+ "model.layers.31.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
318
+ "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
319
+ "model.layers.31.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
320
+ "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
321
+ "model.layers.32.input_layernorm.weight": "model-00004-of-00006.safetensors",
322
+ "model.layers.32.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
323
+ "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
324
+ "model.layers.32.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
325
+ "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
326
+ "model.layers.32.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
327
+ "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
328
+ "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
329
+ "model.layers.32.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
330
+ "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
331
+ "model.layers.32.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
332
+ "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
333
+ "model.layers.33.input_layernorm.weight": "model-00004-of-00006.safetensors",
334
+ "model.layers.33.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
335
+ "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
336
+ "model.layers.33.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
337
+ "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
338
+ "model.layers.33.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
339
+ "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
340
+ "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
341
+ "model.layers.33.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
342
+ "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
343
+ "model.layers.33.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
344
+ "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
345
+ "model.layers.34.input_layernorm.weight": "model-00004-of-00006.safetensors",
346
+ "model.layers.34.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
347
+ "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
348
+ "model.layers.34.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
349
+ "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
350
+ "model.layers.34.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
351
+ "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
352
+ "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
353
+ "model.layers.34.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
354
+ "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
355
+ "model.layers.34.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
356
+ "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
357
  "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
358
+ "model.layers.35.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
359
+ "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
360
  "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
361
  "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
362
+ "model.layers.35.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
363
+ "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
364
+ "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
365
+ "model.layers.35.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
366
+ "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
367
+ "model.layers.35.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
368
+ "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
369
  "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
370
  "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
371
  "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
372
  "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
373
  "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
374
+ "model.layers.36.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
375
  "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
376
  "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
377
+ "model.layers.36.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
378
  "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
379
+ "model.layers.36.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
380
  "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
381
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
382
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
383
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
384
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
385
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
386
+ "model.layers.37.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
387
  "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
388
  "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
389
+ "model.layers.37.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
390
  "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
391
+ "model.layers.37.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
392
  "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
393
+ "model.layers.38.input_layernorm.weight": "model-00005-of-00006.safetensors",
394
+ "model.layers.38.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
395
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
396
+ "model.layers.38.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
397
+ "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
398
+ "model.layers.38.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
399
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
400
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
401
+ "model.layers.38.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
402
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
403
+ "model.layers.38.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
404
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
405
+ "model.layers.39.input_layernorm.weight": "model-00005-of-00006.safetensors",
406
+ "model.layers.39.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
407
+ "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
408
+ "model.layers.39.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
409
+ "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
410
+ "model.layers.39.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
411
+ "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
412
+ "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
413
+ "model.layers.39.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
414
+ "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
415
+ "model.layers.39.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
416
+ "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
417
  "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
418
  "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
419
  "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
420
  "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
421
  "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
422
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
423
  "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
424
  "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
425
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
426
  "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
427
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
428
  "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
429
+ "model.layers.40.input_layernorm.weight": "model-00005-of-00006.safetensors",
430
+ "model.layers.40.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
431
+ "model.layers.40.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
432
+ "model.layers.40.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
433
+ "model.layers.40.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
434
+ "model.layers.40.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
435
+ "model.layers.40.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
436
+ "model.layers.40.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
437
+ "model.layers.40.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
438
+ "model.layers.40.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
439
+ "model.layers.40.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
440
+ "model.layers.40.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
441
+ "model.layers.41.input_layernorm.weight": "model-00005-of-00006.safetensors",
442
+ "model.layers.41.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
443
+ "model.layers.41.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
444
+ "model.layers.41.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
445
+ "model.layers.41.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
446
+ "model.layers.41.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
447
+ "model.layers.41.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
448
+ "model.layers.41.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
449
+ "model.layers.41.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
450
+ "model.layers.41.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
451
+ "model.layers.41.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
452
+ "model.layers.41.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
453
+ "model.layers.42.input_layernorm.weight": "model-00005-of-00006.safetensors",
454
+ "model.layers.42.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
455
+ "model.layers.42.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
456
+ "model.layers.42.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
457
+ "model.layers.42.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
458
+ "model.layers.42.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
459
+ "model.layers.42.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
460
+ "model.layers.42.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
461
+ "model.layers.42.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
462
+ "model.layers.42.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
463
+ "model.layers.42.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
464
+ "model.layers.42.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
465
+ "model.layers.43.input_layernorm.weight": "model-00005-of-00006.safetensors",
466
+ "model.layers.43.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
467
+ "model.layers.43.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
468
+ "model.layers.43.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
469
+ "model.layers.43.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
470
+ "model.layers.43.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
471
+ "model.layers.43.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
472
+ "model.layers.43.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
473
+ "model.layers.43.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
474
+ "model.layers.43.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
475
+ "model.layers.43.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
476
+ "model.layers.43.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
477
+ "model.layers.44.input_layernorm.weight": "model-00005-of-00006.safetensors",
478
+ "model.layers.44.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
479
+ "model.layers.44.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
480
+ "model.layers.44.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
481
+ "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
482
+ "model.layers.44.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
483
+ "model.layers.44.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
484
+ "model.layers.44.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
485
+ "model.layers.44.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
486
+ "model.layers.44.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
487
+ "model.layers.44.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
488
+ "model.layers.44.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
489
+ "model.layers.45.input_layernorm.weight": "model-00006-of-00006.safetensors",
490
+ "model.layers.45.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
491
+ "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
492
+ "model.layers.45.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
493
+ "model.layers.45.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
494
+ "model.layers.45.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
495
+ "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
496
+ "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
497
+ "model.layers.45.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
498
+ "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
499
+ "model.layers.45.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
500
+ "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
501
+ "model.layers.46.input_layernorm.weight": "model-00006-of-00006.safetensors",
502
+ "model.layers.46.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
503
+ "model.layers.46.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
504
+ "model.layers.46.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
505
+ "model.layers.46.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
506
+ "model.layers.46.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
507
+ "model.layers.46.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
508
+ "model.layers.46.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
509
+ "model.layers.46.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
510
+ "model.layers.46.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
511
+ "model.layers.46.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
512
+ "model.layers.46.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
513
+ "model.layers.47.input_layernorm.weight": "model-00006-of-00006.safetensors",
514
+ "model.layers.47.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
515
+ "model.layers.47.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
516
+ "model.layers.47.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
517
+ "model.layers.47.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
518
+ "model.layers.47.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
519
+ "model.layers.47.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
520
+ "model.layers.47.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
521
+ "model.layers.47.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
522
+ "model.layers.47.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
523
+ "model.layers.47.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
524
+ "model.layers.47.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
525
  "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
526
  "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
527
  "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
528
  "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
529
  "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
530
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
531
  "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
532
  "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
533
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
534
  "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
535
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
536
  "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
537
  "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
538
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
539
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
540
  "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
541
  "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
542
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
543
  "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
544
  "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
545
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
546
  "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
547
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
548
  "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
549
  "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
550
  "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
551
  "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
552
  "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
553
  "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
554
+ "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
555
  "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
556
  "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
557
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
558
  "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
559
+ "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
560
  "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
561
  "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
562
  "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
563
  "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
564
  "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
565
  "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
566
+ "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
567
  "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
568
  "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
569
+ "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
570
  "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
571
+ "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
572
  "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
573
  "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
574
  "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
575
  "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
576
  "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
577
  "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
578
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
579
  "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
580
  "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
581
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
582
  "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
583
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
584
  "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
585
  "model.norm.weight": "model-00006-of-00006.safetensors"
586
  }
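
For context on what the `weight_map` entries above do: a loader looks each tensor name up in the index and opens only the shard that contains it. The newly added `self_attn.{q,k,v}_proj.bias` entries match Qwen2-style attention biases. A minimal sketch, assuming the index and shards sit in the working directory (file names as in this commit):

```python
import json

from safetensors import safe_open  # pip install safetensors

# Resolve one of the newly added bias tensors through the shard index.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.2.self_attn.k_proj.bias"
shard = index["weight_map"][name]  # -> "model-00001-of-00006.safetensors"

# Open only the shard that actually holds the tensor.
with safe_open(shard, framework="numpy") as shard_file:
    bias = shard_file.get_tensor(name)

print(name, "->", shard, bias.shape)
```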
special_tokens_map.json CHANGED
@@ -1,27 +1,28 @@
  {
- "bos_token": {
- "content": "<|endoftext|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false
- },
  "eos_token": {
  "content": "<|im_end|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false
  },
  "pad_token": {
- "content": "<|dummy_87|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false
- },
- "unk_token": {
- "content": "�",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,

  {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
  "eos_token": {
  "content": "<|im_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false
  },
  "pad_token": {
+ "content": "<|PAD_TOKEN|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1,791 +1,216 @@
  {
  "add_prefix_space": false,
  "added_tokens_decoder": {
- "5809": {
- "content": "�",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
- "100256": {
- "content": "<|dummy_0|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100257": {
- "content": "<|endoftext|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100258": {
- "content": "<|fim_prefix|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100259": {
- "content": "<|fim_middle|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100260": {
- "content": "<|fim_suffix|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100261": {
- "content": "<|dummy_1|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100262": {
- "content": "<|dummy_2|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100263": {
- "content": "<|dummy_3|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100264": {
  "content": "<|im_start|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100265": {
  "content": "<|im_end|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100266": {
- "content": "<|im_sep|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100267": {
- "content": "<|dummy_4|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100268": {
- "content": "<|dummy_5|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100269": {
- "content": "<|dummy_6|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100270": {
- "content": "<|dummy_7|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100271": {
- "content": "<|dummy_8|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100272": {
- "content": "<|dummy_9|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100273": {
- "content": "<|dummy_10|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100274": {
- "content": "<|dummy_11|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100275": {
- "content": "<|dummy_12|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100276": {
- "content": "<|endofprompt|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100277": {
- "content": "<|dummy_13|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100278": {
- "content": "<|dummy_14|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100279": {
- "content": "<|dummy_15|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100280": {
- "content": "<|dummy_16|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100281": {
- "content": "<|dummy_17|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100282": {
- "content": "<|dummy_18|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100283": {
- "content": "<|dummy_19|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100284": {
- "content": "<|dummy_20|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100285": {
- "content": "<|dummy_21|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100286": {
- "content": "<|dummy_22|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100287": {
- "content": "<|dummy_23|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100288": {
- "content": "<|dummy_24|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100289": {
- "content": "<|dummy_25|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100290": {
- "content": "<|dummy_26|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100291": {
- "content": "<|dummy_27|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100292": {
- "content": "<|dummy_28|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100293": {
- "content": "<|dummy_29|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100294": {
- "content": "<|dummy_30|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100295": {
- "content": "<|dummy_31|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100296": {
- "content": "<|dummy_32|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100297": {
- "content": "<|dummy_33|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100298": {
- "content": "<|dummy_34|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100299": {
- "content": "<|dummy_35|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100300": {
- "content": "<|dummy_36|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100301": {
- "content": "<|dummy_37|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100302": {
- "content": "<|dummy_38|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100303": {
- "content": "<|dummy_39|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100304": {
- "content": "<|dummy_40|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100305": {
- "content": "<|dummy_41|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100306": {
- "content": "<|dummy_42|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100307": {
- "content": "<|dummy_43|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100308": {
- "content": "<|dummy_44|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100309": {
- "content": "<|dummy_45|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100310": {
- "content": "<|dummy_46|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100311": {
- "content": "<|dummy_47|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100312": {
- "content": "<|dummy_48|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100313": {
- "content": "<|dummy_49|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100314": {
- "content": "<|dummy_50|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100315": {
- "content": "<|dummy_51|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100316": {
- "content": "<|dummy_52|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100317": {
- "content": "<|dummy_53|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100318": {
- "content": "<|dummy_54|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100319": {
- "content": "<|dummy_55|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100320": {
- "content": "<|dummy_56|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100321": {
- "content": "<|dummy_57|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100322": {
- "content": "<|dummy_58|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100323": {
- "content": "<|dummy_59|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100324": {
- "content": "<|dummy_60|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100325": {
- "content": "<|dummy_61|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100326": {
- "content": "<|dummy_62|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100327": {
- "content": "<|dummy_63|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100328": {
- "content": "<|dummy_64|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100329": {
- "content": "<|dummy_65|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100330": {
- "content": "<|dummy_66|>",
- "lstrip": true,
- "normalized": false,
- "rstrip": true,
- "single_word": false,
- "special": true
- },
- "100331": {
- "content": "<|dummy_67|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100332": {
- "content": "<|dummy_68|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100333": {
- "content": "<|dummy_69|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100334": {
- "content": "<|dummy_70|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100335": {
- "content": "<|dummy_71|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100336": {
- "content": "<|dummy_72|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100337": {
- "content": "<|dummy_73|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100338": {
- "content": "<|dummy_74|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100339": {
- "content": "<|dummy_75|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100340": {
- "content": "<|dummy_76|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100341": {
- "content": "<|dummy_77|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100342": {
- "content": "<|dummy_78|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  },
- "100343": {
- "content": "<|dummy_79|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100344": {
- "content": "<|dummy_80|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100345": {
- "content": "<|dummy_81|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100346": {
- "content": "<|dummy_82|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100347": {
- "content": "<|dummy_83|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100348": {
- "content": "<|dummy_84|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100349": {
- "content": "<|dummy_85|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100350": {
- "content": "<|dummy_86|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
- "special": true
  },
- "100351": {
- "content": "<|dummy_87|>",
- "lstrip": true,
  "normalized": false,
- "rstrip": true,
  "single_word": false,
  "special": true
  }
  },
- "bos_token": "<|endoftext|>",
- "chat_template": "{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|im_start|>system<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'user') %}{{'<|im_start|>user<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'assistant') %}{{'<|im_start|>assistant<|im_sep|>' + message['content'] + '<|im_end|>'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant<|im_sep|>' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "extra_special_tokens": {},
- "model_max_length": 16384,
- "pad_token": "<|dummy_87|>",
  "padding_side": "left",
- "tokenizer_class": "GPT2Tokenizer",
- "unk_token": "�"
  }

  {
+ "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151644": {
  "content": "<|im_start|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151645": {
  "content": "<|im_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
+ "special": false
  },
+ "151665": {
+ "content": "<|PAD_TOKEN|>",
+ "lstrip": false,
  "normalized": false,
+ "rstrip": false,
  "single_word": false,
  "special": true
  }
  },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
+ "errors": "replace",
  "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|PAD_TOKEN|>",
  "padding_side": "left",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
  }
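
The tokenizer_config change swaps the entire added-tokens table (GPT2-style `<|dummy_*|>` ids in the 100k range) for Qwen2's 151k-range tokens, switches `tokenizer_class` from `GPT2Tokenizer` to `Qwen2Tokenizer`, raises `model_max_length` from 16384 to 131072, and nulls out `bos_token`/`unk_token`. A hedged sanity check, again assuming a local checkout in the current directory:

```python
from transformers import AutoTokenizer

# Load from the local checkout ("." is an assumption, not part of the commit).
tok = AutoTokenizer.from_pretrained(".")

assert "Qwen2Tokenizer" in type(tok).__name__   # Qwen2Tokenizer or Qwen2TokenizerFast
assert tok.model_max_length == 131072
assert tok.bos_token is None and tok.unk_token is None

# The ChatML template (see chat_template.jinja above) drives prompt construction.
ids = tok.apply_chat_template(
    [{"role": "user", "content": "hello"}],
    add_generation_prompt=True,
)
print(ids[:10])
```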
vocab.json CHANGED
The diff for this file is too large to render. See raw diff