import subprocess
import sys

import gradio as gr

try:
    from ollama import chat
except ImportError:
    # Install on demand only when the package is missing, using the current
    # interpreter's pip via subprocess (no shell, exit status checked) instead
    # of an unconditional `os.system("pip install ...")` on every run.
    subprocess.check_call([sys.executable, "-m", "pip", "install", "ollama"])
    from ollama import chat


def chat_with_ollama(prompt):
    """Stream a chat completion from the local Ollama server.

    Generator suitable for Gradio streaming output: each yield is the
    full response accumulated so far, so the textbox updates in place.

    Args:
        prompt: The user's message to send to the model.

    Yields:
        str: The response text accumulated up to the latest chunk.
    """
    stream = chat(
        model='llama3.2',
        messages=[{'role': 'user', 'content': prompt}],
        stream=True,
    )
    output = ""
    for chunk in stream:
        # Mirror each chunk to stdout for console visibility while streaming.
        print(chunk['message']['content'], end='', flush=True)
        output += chunk['message']['content']
        yield output


# Create a Gradio interface
iface = gr.Interface(
    fn=chat_with_ollama,
    inputs=gr.Textbox(label="Enter your prompt"),
    outputs=gr.Textbox(label="Response from Ollama"),
    title="Ollama Chatbot Client",
    description="A Gradio client to interact with the Ollama server.",
)

# Launch the Gradio interface only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()