Update README.md
README.md

@@ -56,8 +56,8 @@ def generate_prompt(description, inputs, outputs):
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 # load model and tokenizer
-model = AutoModelForCausalLM.from_pretrained(
-tokenizer = AutoTokenizer.from_pretrained(
+model = AutoModelForCausalLM.from_pretrained("iamtarun/pycompetitive-codegen350M-qlora", device_map="auto")
+tokenizer = AutoTokenizer.from_pretrained("iamtarun/pycompetitive-codegen350M-qlora")
 
 # loading model for inference
 model.eval()
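For context, a minimal inference sketch built around the completed calls. The model id and `device_map="auto"` come from the diff above; the `generate_prompt` body, the example problem, and the generation settings are assumptions for illustration, not taken from the README.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# load the fine-tuned checkpoint and its tokenizer, as in the updated README
model = AutoModelForCausalLM.from_pretrained(
    "iamtarun/pycompetitive-codegen350M-qlora", device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("iamtarun/pycompetitive-codegen350M-qlora")

# loading model for inference
model.eval()

# placeholder stand-in for the README's generate_prompt helper (signature taken
# from the hunk header); the real implementation lives earlier in the README
def generate_prompt(description, inputs, outputs):
    return f"{description}\nInput: {inputs}\nOutput: {outputs}\n"

prompt = generate_prompt(
    "Read an integer n and print the sum of the first n natural numbers.",
    "5",
    "15",
)

# tokenize the prompt and generate a completion; generation settings are illustrative
encoded = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**encoded, max_new_tokens=128, do_sample=False)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```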