robertou2 committed
Commit ec7b416 · verified · 1 Parent(s): e085cf3

Upload folder using huggingface_hub

README.md CHANGED
@@ -1,5 +1,5 @@
 ---
-base_model: mistralai/Mistral-7B-Instruct-v0.3
+base_model: unsloth/mistral-7b-instruct-v0.3-bnb-4bit
 library_name: peft
 ---
 
@@ -199,4 +199,4 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
 [More Information Needed]
 ### Framework versions
 
-- PEFT 0.15.0
+- PEFT 0.15.2
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.3",
+  "base_model_name_or_path": "unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -11,7 +11,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 32,
-  "lora_dropout": 0.05,
+  "lora_dropout": 0.1,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
@@ -20,18 +20,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "lm_head",
-    "up_proj",
-    "v_proj",
-    "k_proj",
-    "down_proj",
-    "fc1",
-    "fc2",
     "o_proj",
-    "gate_proj",
-    "q_proj"
+    "down_proj",
+    "q_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
-  "use_rslora": false
+  "use_rslora": true
 }
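The one functional switch in this hunk besides the trimmed target_modules is use_rslora flipping from false to true. As a hedged sketch of what that flag changes (the rank r is not visible in this diff, so the value below is an assumption): standard LoRA scales adapter updates by alpha / r, while rank-stabilized LoRA scales by alpha / sqrt(r).

    # Hedged sketch of the scaling difference behind use_rslora.
    # lora_alpha = 32 comes from adapter_config.json; r = 16 is an
    # assumed rank, since the "r" field is not shown in this hunk.
    import math

    lora_alpha = 32
    r = 16  # assumption: not visible in this diff
    standard_scale = lora_alpha / r           # use_rslora = false -> 2.0
    rs_scale = lora_alpha / math.sqrt(r)      # use_rslora = true  -> 8.0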
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:96345905565c6c5c711ee6ad708b7466859bd150df9d1895668af8f39e0be019
-size 438627360
+oid sha256:e35251d4baa457672f612ab2fb153da222b53c27a43743754c26daf0bf882263
+size 46163024
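The new adapter weights drop from roughly 438 MB to 46 MB, consistent with the shorter target_modules list (and the removal of lm_head) in adapter_config.json. A minimal, hedged loading sketch for the updated adapter; the adapter repo id below is a placeholder, not taken from this commit:

    import torch
    from transformers import AutoModelForCausalLM, BitsAndBytesConfig
    from peft import PeftModel

    # Load the 4-bit base model named in the updated README/adapter_config.
    base = AutoModelForCausalLM.from_pretrained(
        "unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
        quantization_config=BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type="nf4",
            bnb_4bit_compute_dtype=torch.bfloat16,
        ),
        device_map="auto",
    )

    # "robertou2/ADAPTER_REPO" is a placeholder for this adapter repository.
    model = PeftModel.from_pretrained(base, "robertou2/ADAPTER_REPO")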
special_tokens_map.json CHANGED
@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "</s>",
+  "pad_token": {
+    "content": "[control_768]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,