warshanks committed on
Commit
3f1d10f
·
verified ·
1 Parent(s): 55f092a

Add files using upload-large-folder tool

Browse files
README.md CHANGED
@@ -22,7 +22,7 @@ tags:
22
 
23
  This model [mlx-community/medgemma-27b-text-it-4bit](https://huggingface.co/mlx-community/medgemma-27b-text-it-4bit) was
24
  converted to MLX format from [google/medgemma-27b-text-it](https://huggingface.co/google/medgemma-27b-text-it)
25
- using mlx-lm version **0.24.1**.
26
 
27
  ## Use with mlx
28
 
 
22
 
23
  This model [mlx-community/medgemma-27b-text-it-4bit](https://huggingface.co/mlx-community/medgemma-27b-text-it-4bit) was
24
  converted to MLX format from [google/medgemma-27b-text-it](https://huggingface.co/google/medgemma-27b-text-it)
25
+ using mlx-lm version **0.25.1**.
26
 
27
  ## Use with mlx
28
 
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:919ee5490902ac402ddc821c97667c84fb8e5930fd57e19938a247cd21aa98a8
3
- size 5366222438
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d58ac34ad6363e42d80cd55bda7c11818fba226294fa0d629c4394a445cc4de
3
+ size 5366222432
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4f4803c2283c7d731dabcdc0e84811fc17b764a40890437a6898649eccc37264
3
- size 5349891576
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:61d157b259870b2925a33540faf4f4b067feba1bad363fd1630b4c5c4a46558a
3
+ size 5349891492
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:05f2c6af467209264a757cba01709dec37ef9099fb7520ca08ea251c986d1f76
3
- size 5271313334
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8858e8834a91fe10db106880f6e3f536d1235f51243d20d8ce452cc71d2639a0
3
+ size 5271313298
model.safetensors.index.json CHANGED
@@ -1,6 +1,7 @@
1
  {
2
  "metadata": {
3
- "total_size": 15987234304
 
4
  },
5
  "weight_map": {
6
  "lm_head.biases": "model-00003-of-00003.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 15987234304,
4
+ "total_parameters": 28418288384
5
  },
6
  "weight_map": {
7
  "lm_head.biases": "model-00003-of-00003.safetensors",