# Hugging Face Spaces status header (extraction artifact, kept as comments):
# Spaces: Sleeping
# Sleeping
import os
from typing import Dict, Iterator, List, Optional, Tuple

import gradio as gr
from openai import OpenAI
# Safe API-key loading: read from the environment instead of hard-coding.
# GROQ_API_KEY = "YOUR_KEY"  # alternatively, set the API key explicitly
GROQ_API_KEY = os.environ.get('GROQ_API_KEY')

# OpenAI-compatible client pointed at the Groq endpoint.
client = OpenAI(
    base_url="https://api.groq.com/openai/v1",
    api_key=GROQ_API_KEY,
)
# Chat data structures.
History = List[Tuple[str, str]]   # [(user_message, assistant_reply), ...]
Messages = List[Dict[str, str]]   # OpenAI-style [{'role': ..., 'content': ...}, ...]


def clear_session() -> Tuple[str, History]:
    """Return an empty input string and an empty chat history."""
    return '', []


def modify_system_session(system: str) -> Tuple[str, str, History]:
    """Apply *system* as the new system prompt and wipe the chat history."""
    return system, system, []


def history_to_messages(history: History, system: str) -> Messages:
    """Convert (user, assistant) pairs into an OpenAI-style message list,
    prefixed with the system prompt."""
    messages: Messages = [{'role': 'system', 'content': system}]
    for user_msg, ai_response in history:
        messages.extend((
            {'role': 'user', 'content': user_msg},
            {'role': 'assistant', 'content': ai_response},
        ))
    return messages
# Streaming chat with the model.
def model_chat(
    query: Optional[str],
    history: Optional[History],
    system: str,
    model_name: str,
) -> Iterator[Tuple[List[Dict[str, str]], History, str]]:
    """Stream a chat completion from the Groq API.

    Yields ``(chatbot_messages, updated_history, system_prompt)`` tuples —
    one per received content chunk — so the three Gradio outputs update
    incrementally while the model streams. (The original annotation said
    ``Iterator[List[Dict[str, str]]]``, which did not match the 3-tuples
    actually yielded; fixed here.)

    Args:
        query: The user's message; if empty/None, the generator yields nothing.
        history: Prior (user, assistant) pairs; ``None`` is treated as empty.
            NOTE(review): ``gr.Chatbot(type='messages')`` normally supplies
            history as role/content dicts, not tuples — confirm against the UI.
        system: System prompt prepended to the conversation.
        model_name: Groq model identifier to query.
    """
    if not query:
        return
    if history is None:
        history = []
    messages = history_to_messages(history, system)
    messages.append({'role': 'user', 'content': query})
    try:
        response = client.chat.completions.create(
            model=model_name,
            messages=messages,
            stream=True,
        )
        full_response = ""
        for chunk in response:
            # Keep-alive / final chunks may carry no choices or content; skip them.
            if hasattr(chunk, "choices") and chunk.choices:
                delta = chunk.choices[0].delta
                content = getattr(delta, "content", "")
                if content:
                    full_response += content
                    yield [{"role": "assistant", "content": full_response}], history + [(query, full_response)], system
    except Exception:
        # UI boundary: surface the full traceback in the chat instead of crashing.
        import traceback
        error_message = traceback.format_exc()
        yield [{"role": "system", "content": f"Ошибка: {error_message}"}], history, system
# Refresh the UI widgets when a different model is selected.
def choose_radio(model_name: str, system: str):
    """Rebuild the header and chatbot for *model_name*.

    Returns updated values for (markdown header, chatbot, system state,
    system textbox, input textbox) — the input box is cleared.
    """
    header = gr.Markdown(value=f"<center><font size=8>{model_name}👾</center>")
    fresh_chat = gr.Chatbot(label=model_name, type='messages')
    return header, fresh_chat, system, system, ""
# Main UI.
def main():
    """Build and launch the Gradio chat interface."""
    # Available Groq-hosted models (first one is the default).
    model_choices = [
        "qwen-2.5-coder-32b",
        "qwen-qwq-32b",
        "deepseek-r1-distill-qwen-32b",
        "deepseek-r1-distill-llama-70b",
        "llama-3.2-90b-vision-preview",
        "llama-3.3-70b-versatile",
        "llama-3.1-8b-instant",
    ]
    with gr.Blocks() as demo:
        with gr.Row():
            model_radio = gr.Radio(choices=model_choices, label="Модель:", value="qwen-2.5-coder-32b")
        with gr.Row():
            with gr.Accordion():
                header_md = gr.Markdown("<center><font size=8>Qwen2.5-Coder-32B-Instruct Bot👾</center>")
                system_box = gr.Textbox(value="Ваша задача — проверка синтаксиса и структуры предоставленного кода.", lines=1, label='System')
                set_system_btn = gr.Button("🛠️ Установить системный промпт и очистить историю")
                # Hidden holder for the currently active system prompt.
                system_state = gr.Textbox(value="", visible=False)
                chat = gr.Chatbot(label='Qwen2.5-Coder-32B-Instruct', type='messages')
                user_input = gr.Textbox(lines=1, label='Ввод')
                with gr.Row():
                    clear_btn = gr.Button("🧹 Очистить историю")
                    send_btn = gr.Button("🚀 Отправить")
        # Both Enter and the send button trigger the same streaming handler.
        user_input.submit(model_chat, inputs=[user_input, chat, system_state, model_radio], outputs=[chat, chat, system_box])
        send_btn.click(model_chat, inputs=[user_input, chat, system_state, model_radio], outputs=[chat, chat, system_box])
        clear_btn.click(fn=clear_session, inputs=[], outputs=[user_input, chat, system_box])
        set_system_btn.click(fn=modify_system_session, inputs=[system_box], outputs=[system_state, system_box, chat])
        model_radio.change(choose_radio, inputs=[model_radio, system_box], outputs=[header_md, chat, system_state, system_box, user_input])
    demo.queue(api_open=False)
    demo.launch()
# API smoke test.
def test_api():
    """Send one non-streaming completion request and print the reply."""
    reply = client.chat.completions.create(
        model="deepseek-r1-distill-llama-70b",
        messages=[{"role": "user", "content": "What's the purpose of Generative AI?"}],
    )
    print(reply.choices[0].message.content)
if __name__ == "__main__":
    # Launch the UI first; test_api() only runs after the blocking
    # demo.launch() call returns (i.e. after the server is shut down).
    main()
    test_api()