Spaces:
Runtime error
Runtime error
ffreemt
committed on
Commit
·
e53c8cf
1
Parent(s):
6ba78c1
Update torch.cuda.is_available() for device/device_type
Browse files
app.py
CHANGED
|
@@ -261,8 +261,8 @@ def gen_local_llm(model_id="TheBloke/vicuna-7B-1.1-HF"):
|
|
| 261 |
model = LlamaForCausalLM.from_pretrained(
|
| 262 |
model_id,
|
| 263 |
# load_in_8bit=True, # set these options if your GPU supports them!
|
| 264 |
-
|
| 265 |
-
|
| 266 |
low_cpu_mem_usage=True
|
| 267 |
)
|
| 268 |
else:
|
|
|
|
| 261 |
model = LlamaForCausalLM.from_pretrained(
|
| 262 |
model_id,
|
| 263 |
# load_in_8bit=True, # set these options if your GPU supports them!
|
| 264 |
+
device_map="auto",
|
| 265 |
+
torch_dtype=torch.float16,
|
| 266 |
low_cpu_mem_usage=True
|
| 267 |
)
|
| 268 |
else:
|