Update app.py
app.py
CHANGED
@@ -4,8 +4,7 @@ import torch
 # Load your model and tokenizer
 model_name = "midrees2806/2Krows_uoe_edu"
 tokenizer = LlamaTokenizer.from_pretrained(model_name)
-model = LlamaForCausalLM.from_pretrained(model_name,torch_dtype=torch.float16,device_map="
-load_in_4bit=True)
+model = LlamaForCausalLM.from_pretrained(model_name,torch_dtype=torch.float16,device_map="cpu")
 
 # Define the pipeline
 pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer)
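
For reference, a minimal sketch of the full app.py as it stands after this commit. The hunk only starts at line 4, so the imports are assumed here (LlamaForCausalLM, LlamaTokenizer, and pipeline from transformers, plus torch as referenced in the hunk header); the prompt in the usage line at the end is illustrative only.

# Sketch of app.py after this commit (imports assumed; not shown in the diff)
import torch
from transformers import LlamaForCausalLM, LlamaTokenizer, pipeline

# Load your model and tokenizer
model_name = "midrees2806/2Krows_uoe_edu"
tokenizer = LlamaTokenizer.from_pretrained(model_name)
# The commit replaces the broken 4-bit call with a plain float16 load mapped to CPU
model = LlamaForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="cpu")

# Define the pipeline
pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer)

# Example usage (assumed, not part of the commit): generate a short completion
print(pipe("Hello, University of Edinburgh", max_new_tokens=32)[0]["generated_text"])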