update readme.txt
README.md CHANGED
@@ -46,7 +46,7 @@ To load the model with HuggingFace, use the following snippet:
 ```
 from transformers import AutoModelForCausalLM
 
-model = AutoModelForCausalLM.from_pretrained("ContaAI/ContaLLM-Beauty-8B-Instruct")
+model = AutoModelForCausalLM.from_pretrained("ContaAI/ContaLLM-Beauty-8B-Instruct-4bit")
 ```
 
 
@@ -80,8 +80,8 @@ user_prompt = '营销需求:美白水乳推荐,推广HBN原白水乳。\n关
 ```
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
-model_name = "ContaAI/ContaLLM-Beauty-8B-Instruct"
-model = AutoModelForCausalLM.from_pretrained(model_name,
+model_name = "ContaAI/ContaLLM-Beauty-8B-Instruct-4bit"
+model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 system_prompt = '请根据用户提供的营销需求和其他信息写一篇美妆护肤行业的营销推文。'
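
For context, a minimal end-to-end sketch of how the updated snippet is typically used: it loads the 4-bit checkpoint exactly as the new README lines do, then builds a chat prompt and generates a post. The `apply_chat_template` call, the sampling parameters, and the shortened `user_prompt` are illustrative assumptions based on standard `transformers` usage, not part of this commit.

```python
# Minimal usage sketch (not part of the commit). Assumes the -4bit repo loads
# like any transformers causal LM and ships a chat template; the generation
# settings below are illustrative defaults, not values from the README.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "ContaAI/ContaLLM-Beauty-8B-Instruct-4bit"
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Prompts from the README. System prompt (Chinese): "Write a marketing post for
# the beauty/skincare industry based on the marketing brief and other
# information provided by the user."
system_prompt = '请根据用户提供的营销需求和其他信息写一篇美妆护肤行业的营销推文。'
# The full user prompt is truncated in the diff; only the visible part is used
# here: "Marketing brief: whitening toner-and-lotion recommendation, promoting
# HBN's 原白 toner-and-lotion set."
user_prompt = '营销需求:美白水乳推荐,推广HBN原白水乳。'

messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": user_prompt},
]

# Render the chat template and generate the marketing post.
input_ids = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

with torch.no_grad():
    output_ids = model.generate(
        input_ids,
        max_new_tokens=512,
        do_sample=True,
        temperature=0.7,
    )

# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```

If the 4-bit checkpoint depends on a quantization backend such as bitsandbytes, that library must be installed as well; this is an assumption based on the -4bit naming, not something stated in the diff.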