Commit 222b9f2 (parent: 244b31d): Update app.py

app.py CHANGED
|
@@ -1,6 +1,14 @@
 import gradio as gr
 import time
 from ctransformers import AutoModelForCausalLM
+from __future__ import annotations
+from typing import Iterable
+import gradio as gr
+from gradio.themes.base import Base
+from gradio.themes.utils import colors, fonts, sizes
+import subprocess
+
+from huggingface_hub import hf_hub_download
 
 # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system.
 model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-Instruct-v0.1-GGUF", model_file="mistral-7b-instruct-v0.1.Q5_K_S.gguf", model_type="mistral", gpu_layers=0)
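For context, a minimal sketch of how the model loaded above could drive a streaming Gradio chat UI. This is not part of the commit; it assumes ctransformers' callable stream=True generator interface and Gradio's ChatInterface, and the respond() wrapper and prompt template are illustrative only.

# Sketch only: streaming the quantized Mistral model's output through a Gradio chat UI.
import gradio as gr
from ctransformers import AutoModelForCausalLM

# Set gpu_layers > 0 to offload that many layers to the GPU; 0 keeps everything on CPU.
model = AutoModelForCausalLM.from_pretrained(
    "TheBloke/Mistral-7B-Instruct-v0.1-GGUF",
    model_file="mistral-7b-instruct-v0.1.Q5_K_S.gguf",
    model_type="mistral",
    gpu_layers=0,
)

def respond(message, history):
    # Mistral-Instruct expects the [INST] ... [/INST] prompt format.
    prompt = f"[INST] {message} [/INST]"
    partial = ""
    # stream=True yields generated text piece by piece instead of blocking until completion.
    for token in model(prompt, stream=True):
        partial += token
        yield partial

demo = gr.ChatInterface(respond)
demo.queue().launch()

Streaming keeps the interface responsive while the Q5_K_S quantized model generates token by token on CPU.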