Update README.md
Browse files
README.md
CHANGED
@@ -3,7 +3,7 @@ library_name: transformers
|
|
3 |
tags: []
|
4 |
---
|
5 |
|
6 |
-
# MISHANM/Nepali_NLP_eng_to_nepali_Llama3.
|
7 |
|
8 |
This model is fine-tuned for the Nepali language, capable of answering queries and translating text from English to Nepali. It leverages advanced natural language processing techniques to provide accurate and context-aware responses.
|
9 |
|
@@ -36,7 +36,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
36 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
37 |
|
38 |
# Load the fine-tuned model and tokenizer
|
39 |
-
model_path = "MISHANM/Nepali_NLP_eng_to_nepali_Llama3.
|
40 |
model = AutoModelForCausalLM.from_pretrained(model_path)
|
41 |
|
42 |
# Wrap the model with DataParallel if multiple GPUs are available
|
@@ -80,7 +80,7 @@ print(translated_text)
|
|
80 |
|
81 |
## Citation Information
|
82 |
```
|
83 |
-
@misc{MISHANM/Nepali_NLP_eng_to_nepali_Llama3.
|
84 |
author = {Mishan Maurya},
|
85 |
title = {Introducing Fine Tuned LLM for Nepali Language},
|
86 |
year = {2024},
|
|
|
3 |
tags: []
|
4 |
---
|
5 |
|
6 |
+
# MISHANM/Nepali_NLP_eng_to_nepali_Llama3.2_3B_instruction
|
7 |
|
8 |
This model is fine-tuned for the Nepali language, capable of answering queries and translating text from English to Nepali. It leverages advanced natural language processing techniques to provide accurate and context-aware responses.
|
9 |
|
|
|
36 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
37 |
|
38 |
# Load the fine-tuned model and tokenizer
|
39 |
+
model_path = "MISHANM/Nepali_NLP_eng_to_nepali_Llama3.2_3B_instruction"
|
40 |
model = AutoModelForCausalLM.from_pretrained(model_path)
|
41 |
|
42 |
# Wrap the model with DataParallel if multiple GPUs are available
|
|
|
80 |
|
81 |
## Citation Information
|
82 |
```
|
83 |
+
@misc{MISHANM_Nepali_NLP_eng_to_nepali_Llama3_2_3B_instruction,
|
84 |
author = {Mishan Maurya},
|
85 |
title = {Introducing Fine Tuned LLM for Nepali Language},
|
86 |
year = {2024},
|