Cigolitna123 committed
Commit · d818664 · 0 Parent(s)
Initial setup

Files changed:
- mk.py +22 -0
- requirements.txt +2 -0
mk.py ADDED
@@ -0,0 +1,22 @@
+import gradio as gr
+from llama_cpp import Llama
+
+llm = Llama(
+    model_path="MKLLM-7B-Instruct-Q4_0.gguf",
+    n_ctx=2048
+)
+
+def chat(message, history):
+    response = llm.create_completion(
+        f"USER: {message}\nASSISTANT:",
+        max_tokens=512,
+        temperature=0.7
+    )
+    return response['choices'][0]['text']
+
+demo = gr.ChatInterface(
+    chat,
+    title="MKLLM Chat",
+)
+
+demo.launch()
requirements.txt ADDED
@@ -0,0 +1,2 @@
+gradio
+llama-cpp-python
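
Note: mk.py opens MKLLM-7B-Instruct-Q4_0.gguf from the Space's working directory, but the GGUF file itself is not part of this commit. A minimal sketch of fetching it at startup with huggingface_hub is shown below; the repo_id is an assumption, not something this commit specifies.

from huggingface_hub import hf_hub_download

# huggingface_hub ships as a dependency of gradio, so it is already available here.
# Hypothetical repo_id: replace with the Hub repo that actually hosts the quantized model.
model_path = hf_hub_download(
    repo_id="trajkovnikola/MKLLM-7B-Instruct-GGUF",
    filename="MKLLM-7B-Instruct-Q4_0.gguf",
)
# mk.py would then pass this path to Llama(model_path=model_path, n_ctx=2048).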