Commit 7d6ce9c (verified) by skar0
Parent: 86704a6

Add random LoRA adapter with seed 0

reference/adapter_config.json CHANGED
@@ -25,14 +25,20 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gpt_neox.layers.0.attention.dense",
-    "gpt_neox.layers.1.attention.query_key_value",
-    "gpt_neox.layers.1.mlp.dense_h_to_4h",
-    "gpt_neox.layers.0.mlp.dense_4h_to_h",
-    "gpt_neox.layers.1.mlp.dense_4h_to_h",
-    "gpt_neox.layers.0.attention.query_key_value",
-    "gpt_neox.layers.0.mlp.dense_h_to_4h",
-    "gpt_neox.layers.1.attention.dense"
+    "model.layers.1.self_attn.q_proj",
+    "model.layers.0.mlp.up_proj",
+    "model.layers.0.self_attn.v_proj",
+    "model.layers.0.self_attn.k_proj",
+    "model.layers.0.mlp.down_proj",
+    "model.layers.1.self_attn.o_proj",
+    "model.layers.0.mlp.gate_proj",
+    "model.layers.1.mlp.gate_proj",
+    "model.layers.0.self_attn.q_proj",
+    "model.layers.1.self_attn.k_proj",
+    "model.layers.0.self_attn.o_proj",
+    "model.layers.1.mlp.down_proj",
+    "model.layers.1.self_attn.v_proj",
+    "model.layers.1.mlp.up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
reference/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8b0f82f158d081d72e035f0be89df9c467c5cf3796a338e3bf3f3dd513705ed
-size 100448
+oid sha256:b0fb600ad1728d23d36bca8bda23f4969ff77e2199f8886ea6f0a83ebcb5e098
+size 2264640
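Only the Git LFS pointer changes here; the ~2.2 MB payload itself lives in LFS storage. After fetching the LFS object, the tensor names and shapes can be listed to confirm the file holds lora_A/lora_B pairs for the 14 targeted modules, for example:

```python
from safetensors import safe_open

# Assumes the LFS object has been fetched (git lfs pull) so the real file is local.
with safe_open("reference/adapter_model.safetensors", framework="pt") as f:
    for name in f.keys():
        # Typically names like ...layers.0.self_attn.q_proj.lora_A.weight
        print(name, tuple(f.get_tensor(name).shape))
```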
special_tokens_map.json CHANGED
@@ -1,22 +1,22 @@
 {
   "bos_token": {
-    "content": "<|endoftext|>",
+    "content": "<s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|endoftext|>",
+    "content": "</s>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "unk_token": {
-    "content": "<|endoftext|>",
+    "content": "<unk>",
     "lstrip": false,
-    "normalized": false,
+    "normalized": true,
     "rstrip": false,
     "single_word": false
   }
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d4c0d99d84af59e9126913fafe5210822963e9a3065ee43e6833b358b0c2f825
-size 3564303
+oid sha256:1a75511f846a980e08bc5874db204a56c740113450f1e37744e9bcafee84d785
+size 3619013
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
tokenizer_config.json CHANGED
@@ -1,215 +1,44 @@
 {
-  "add_bos_token": false,
+  "add_bos_token": true,
   "add_eos_token": false,
-  "add_prefix_space": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
-      "content": "<|endoftext|>",
+      "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
     "1": {
-      "content": "<|padding|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50254": {
-      "content": " ",
+      "content": "<s>",
       "lstrip": false,
       "normalized": true,
       "rstrip": false,
       "single_word": false,
-      "special": false
-    },
-    "50255": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50256": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50257": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50258": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50259": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50260": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50261": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50262": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50263": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50264": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50265": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50266": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50267": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50268": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50269": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50270": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50271": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50272": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50273": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50274": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    },
-    "50275": {
-      "content": " ",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
+      "special": true
     },
-    "50276": {
-      "content": " ",
+    "2": {
+      "content": "</s>",
       "lstrip": false,
       "normalized": true,
       "rstrip": false,
       "single_word": false,
-      "special": false
+      "special": true
     }
   },
-  "bos_token": "<|endoftext|>",
-  "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
   "extra_special_tokens": {},
+  "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": null,
-  "tokenizer_class": "GPTNeoXTokenizer",
-  "unk_token": "<|endoftext|>"
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false,
+  "use_fast": true
 }
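With tokenizer.model added and tokenizer_config.json switched to LlamaTokenizer, the repository now ships a SentencePiece-based Llama tokenizer in place of the GPT-NeoX one. A quick sanity check, assuming a local clone with the LFS files pulled (the "." path stands in for the actual repo id, which the diff does not show):

```python
from transformers import AutoTokenizer

# "." stands in for a local clone of this repository; LFS objects must be fetched first.
tok = AutoTokenizer.from_pretrained(".")

print(type(tok).__name__)                           # expected: LlamaTokenizer or LlamaTokenizerFast
print(tok.bos_token, tok.eos_token, tok.unk_token)  # expected: <s> </s> <unk>

# With "add_bos_token": true, encoded ids should start with the <s> id (1 per the config above).
print(tok("hello world").input_ids)
```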