Spaces:
Sleeping
Sleeping
Gumelar Teja Sukma
committed on
Commit
·
4e121c2
1
Parent(s):
b31e33d
bug fix
Browse files
app.py
CHANGED
@@ -13,8 +13,8 @@ print("Is GPU Available",torch.cuda.is_available()) # Apakah GPU terdeteksi?
|
|
13 |
print("CPU cores:", psutil.cpu_count())
|
14 |
print("RAM (GB):", psutil.virtual_memory().total / (1024**3))
|
15 |
|
16 |
-
|
17 |
-
model_name_or_path = "meta-llama/Llama-2-7b-chat-hf"
|
18 |
# model_name_or_path = "TheBloke/Llama-2-7B-Chat-GGUF"
|
19 |
# model_name_or_path = "TheBloke/Mistral-7B-v0.1-GPTQ"
|
20 |
# model_name_or_path = "unsloth/DeepSeek-R1-0528-GGUF" # 3x lebih cepat dari Mistral-7B
|
|
|
13 |
print("CPU cores:", psutil.cpu_count())
|
14 |
print("RAM (GB):", psutil.virtual_memory().total / (1024**3))
|
15 |
|
16 |
+
model_name_or_path = "TheBloke/Llama-2-7B-Chat-GPTQ"
|
17 |
+
# model_name_or_path = "meta-llama/Llama-2-7b-chat-hf"
|
18 |
# model_name_or_path = "TheBloke/Llama-2-7B-Chat-GGUF"
|
19 |
# model_name_or_path = "TheBloke/Mistral-7B-v0.1-GPTQ"
|
20 |
# model_name_or_path = "unsloth/DeepSeek-R1-0528-GGUF" # 3x lebih cepat dari Mistral-7B
|