# YUGO-GPT / app.py
# (HF Space by cigol123 — last commit "Update app.py", d10f679, verified)
import gradio as gr
from llama_cpp import Llama
# Load the quantized YugoGPT model via llama-cpp-python.
# NOTE(review): the .gguf file is expected in the working directory — confirm deploy layout.
llm = Llama(
    model_path="yugogpt-q4_k_s.gguf",
    n_ctx=4096,        # context window in tokens
    n_threads=8,       # CPU threads for inference
    n_batch=1024,      # prompt-processing batch size
    use_mlock=True,    # pin model memory to avoid swapping
    use_mmap=True,     # memory-map the model file
    n_gpu_layers=0,    # CPU-only; raise if a GPU is available
    verbose=False,     # suppress llama.cpp logging
)
def format_chat_history(history):
    """Render (user, assistant) message pairs as a plain-text transcript.

    Each pair becomes a "USER: ...\\nA: ...\\n" segment; segments are
    concatenated in order. An empty history yields an empty string.
    """
    segments = [
        f"USER: {user_turn}\nA: {bot_turn}\n"
        for user_turn, bot_turn in history
    ]
    return "".join(segments)
def chat(message, history):
    """Stream a YugoGPT reply to `message`, conditioned on `history`.

    Args:
        message: the user's latest input string.
        history: list of (user, assistant) pairs from previous turns.

    Yields:
        Progressively longer partial responses, so Gradio can render the
        answer as it streams.
    """
    # Serbian system prompt: be factual, concise, and admit missing knowledge.
    # Fixed typo: "trženu" -> "traženu" ("requested" information).
    system_prompt = """Ti si YugoGPT, profesionalni AI asistent koji daje precizne i korisne informacije.

PRAVILA:
- Ne izmišljam informacije
- Ako nemam traženu informaciju to jasno naglasim
- Dajem proverene jasne i konkretne informacije
- Koristim precizan srpski jezik
- Fokusiram se na činjenice
- Odgovaram direktno i efikasno
- Održavam profesionalan ton"""

    chat_history = format_chat_history(history)

    # Prompt layout must match the transcript format produced by
    # format_chat_history ("USER:" / "A:" markers).
    full_prompt = f"""SYSTEM: {system_prompt}

KONTEKST:
{chat_history}

USER: {message}
A: """

    response = llm(
        full_prompt,
        max_tokens=4096,      # generation cap (model context is 4096)
        temperature=0.7,      # moderate sampling randomness
        top_p=0.1,            # tight nucleus sampling keeps output focused
        repeat_penalty=1.2,
        top_k=20,
        stop=["USER:", "\n\n"],  # stop at next turn or paragraph break
        stream=True,
    )

    # Accumulate streamed tokens and yield the running partial answer.
    partial_message = ""
    for chunk in response:
        if chunk and chunk['choices'][0]['text']:
            partial_message += chunk['choices'][0]['text']
            yield partial_message
# Gradio chat UI wired to the streaming `chat` generator.
# Title/description are in Serbian; the description warns the model can hallucinate.
demo = gr.ChatInterface(
    fn=chat,
    title="YugoGPT Stručni Asistent",
    description="Profesionalni izvor informacija i stručne pomoći, PAŽNJA, ZNA DA LAŽE!!!",
    examples=[
        "Koji su osnovni principi relacionih baza podataka?",
        "Objasnite kako funkcioniše HTTP protokol",
        "Koje su glavne komponente računara i njihove funkcije?"
    ]
)
if __name__ == "__main__":
demo.queue().launch(
server_name="0.0.0.0",
server_port=7860,
share=False
)