# ollama_client / app.py
import os

# Install the Ollama Python client at runtime (quick workaround for the Space environment).
os.system("pip install ollama")

import gradio as gr
from ollama import chat


def chat_with_ollama(prompt):
    """Stream a chat completion from the Ollama server for the given prompt."""
    # The client expects a running Ollama server (default http://localhost:11434,
    # configurable via the OLLAMA_HOST environment variable).
    stream = chat(
        model='llama3.2',
        messages=[{'role': 'user', 'content': prompt}],
        stream=True,
    )
    output = ""
    for chunk in stream:
        # Accumulate the streamed tokens and yield the partial response so the
        # Gradio textbox updates incrementally.
        output += chunk['message']['content']
        yield output

# Create a Gradio interface
iface = gr.Interface(
    fn=chat_with_ollama,
    inputs=gr.Textbox(label="Enter your prompt"),
    outputs=gr.Textbox(label="Response from Ollama"),
    title="Ollama Chatbot Client",
    description="A Gradio client to interact with the Ollama server."
)
# Launch the Gradio interface
iface.launch()