TorchAO Testing committed on
Commit 5c390fc · verified · 1 Parent(s): d99de3c

Update README.md

Files changed (1): README.md (+3 -1)
README.md CHANGED

@@ -2,6 +2,7 @@
 library_name: transformers
 tags: []
 ---
+```
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TorchAoConfig
 
@@ -37,4 +38,5 @@ generated_ids = quantized_model.generate(**inputs, max_new_tokens=128)
 output_text = tokenizer.batch_decode(
     generated_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False
 )
-print("Response:", output_text[0][len(prompt) :])
+print("Response:", output_text[0][len(prompt) :])
+```
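The two hunks above show only the edges of the README's usage example: the code fences being added, the opening imports, and the final decode-and-print step. As a reference for how those fragments typically fit together with the transformers TorchAO integration, here is a minimal sketch; the model id, prompt, and quantization settings are placeholders for illustration, not values taken from this repository's README.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TorchAoConfig

# Placeholder checkpoint and quantization settings -- the actual values live in
# the parts of the README that this diff does not show.
model_id = "meta-llama/Llama-3.1-8B-Instruct"
quantization_config = TorchAoConfig("int4_weight_only", group_size=128)

# Quantize the weights with TorchAO while loading the model.
quantized_model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    quantization_config=quantization_config,
)
tokenizer = AutoTokenizer.from_pretrained(model_id)

prompt = "What are we having for dinner?"
inputs = tokenizer(prompt, return_tensors="pt").to(quantized_model.device)

# Generate and decode, then strip the echoed prompt as in the diff's final lines.
generated_ids = quantized_model.generate(**inputs, max_new_tokens=128)
output_text = tokenizer.batch_decode(
    generated_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False
)
print("Response:", output_text[0][len(prompt) :])
```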