# ollama_client / app.py
# Gradio front-end that streams chat completions from a local Ollama server.
import gradio as gr
import os
import subprocess
import sys

# Install the ollama client at runtime (common Hugging Face Spaces pattern).
# Use subprocess with an argument list and the current interpreter instead of
# os.system's shell string, so the package lands in the right environment and
# a failure surfaces immediately rather than being silently ignored.
subprocess.run([sys.executable, "-m", "pip", "install", "ollama"], check=True)

import ollama
from ollama import chat

# Pre-pull the model at startup so the first chat request doesn't block on a
# multi-GB download.
ollama.pull('hf.co/mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated-GGUF:Q2_K')
def chat_with_ollama(prompt, model='hf.co/mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated-GGUF:Q2_K'):
    """Stream a chat completion from the local Ollama server.

    Args:
        prompt: User message to send to the model.
        model: Ollama model tag to query. Defaults to the tag pulled at
            startup; made a parameter so callers can target other models.

    Yields:
        str: The response text accumulated so far after each streamed chunk,
        so a Gradio Textbox output updates incrementally.
    """
    stream = chat(
        model=model,
        # prompt is already a string; no f-string wrapper needed.
        messages=[{'role': 'user', 'content': prompt}],
        stream=True,
    )
    output = ""
    for chunk in stream:
        # Mirror the stream to the console for server-side visibility.
        print(chunk['message']['content'], end='', flush=True)
        output += chunk['message']['content']
        yield output
# Wire the streaming chat function into a simple one-box-in, one-box-out UI.
prompt_box = gr.Textbox(label="Enter your prompt")
response_box = gr.Textbox(label="Response from Ollama")

iface = gr.Interface(
    fn=chat_with_ollama,
    inputs=prompt_box,
    outputs=response_box,
    title="Ollama Chatbot Client",
    description="A Gradio client to interact with the Ollama server.",
)

# Start the web server (blocks until shutdown).
iface.launch()