Upload folder using huggingface_hub
- config.json +3 -3
- generation_config.json +1 -1
- model.safetensors +1 -1
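This commit is the kind produced by huggingface_hub's `upload_folder` helper. A minimal sketch of such an upload is below; the repo id and local folder path are placeholders, not values taken from this commit.

```python
# Minimal sketch of an upload that produces a commit like this one.
# repo_id and folder_path are placeholders (assumptions), not taken from the diff.
from huggingface_hub import upload_folder

upload_folder(
    repo_id="your-org/your-compressed-model",  # hypothetical target repo
    folder_path="./compressed-model",          # local folder holding config.json, model.safetensors, ...
    commit_message="Upload folder using huggingface_hub",
)
```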
config.json
CHANGED
@@ -52,7 +52,7 @@
     "quantization_status": "compressed",
     "sparsity_config": {
       "format": "dense",
-      "global_sparsity": 0.
+      "global_sparsity": 0.4536571079073166,
       "ignore": [
         "lm_head"
       ],
@@ -63,13 +63,13 @@
       ]
     },
     "transform_config": {},
-    "version": "0.
+    "version": "0.1.dev1+g66acd43"
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "transformers_version": "4.
+  "transformers_version": "4.57.0.dev0",
   "use_cache": true,
   "vocab_size": 32000
 }
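The updated config marks the checkpoint as compressed and records the compressor and transformers versions. A sketch of reading those fields back is below, assuming the shown keys live under a top-level `quantization_config` block (the parent key is not visible in this diff) and using a placeholder repo id.

```python
# Sketch: fetch the updated config.json and read the fields this commit fills in.
# The repo id is a placeholder, and "quantization_config" as the parent key of
# the sparsity_config block is an assumption (the diff does not show it).
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="your-org/your-compressed-model", filename="config.json")
with open(path) as f:
    cfg = json.load(f)

qcfg = cfg["quantization_config"]                  # assumed parent key
print(qcfg["sparsity_config"]["global_sparsity"])  # 0.4536571079073166 after this commit
print(qcfg["version"])                             # "0.1.dev1+g66acd43"
print(cfg["transformers_version"])                 # "4.57.0.dev0"
```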
generation_config.json
CHANGED
@@ -3,5 +3,5 @@
   "eos_token_id": 2,
   "max_length": 2048,
   "pad_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.57.0.dev0"
 }
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6ec7595864de046429458d49618d6a145dbaf039251e8d36001d677f7f21d5bf
 size 626506392
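The model.safetensors entry is a Git LFS pointer, so the diff only changes the recorded sha256 oid. Below is a sketch of checking a locally downloaded weight file against that oid; the local path is a placeholder.

```python
# Sketch: verify a downloaded model.safetensors against the sha256 oid in the
# LFS pointer above. The local file path is a placeholder.
import hashlib

EXPECTED_OID = "6ec7595864de046429458d49618d6a145dbaf039251e8d36001d677f7f21d5bf"

sha = hashlib.sha256()
with open("model.safetensors", "rb") as f:            # placeholder path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("OK: file matches the LFS pointer (expected size 626506392 bytes)")
```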