# Hugging Face Space: Gradio + LangChain chatbot
# (Space status at time of capture: "Runtime error" — see fix in get_text_response)
import os

import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory

# OpenRouter exposes an OpenAI-compatible API; point the OpenAI client at it.
# NOTE(review): ChatOpenAI below also receives these values explicitly, so the
# env var is belt-and-braces; the module-level OPENAI_API_KEY is kept for
# compatibility although nothing else in this file reads it.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
os.environ["OPENAI_API_BASE"] = "https://openrouter.ai/api/v1"
# Prompt contract: the chain fills {chat_history} (from memory) and
# {user_message} (from the caller) on every turn.
template = """The following is a conversation between a friendly tech-savvy chatbot and a user. The chatbot gives short, helpful answers and avoids repeating the user's input.
{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template,
)

# Memory records each turn and supplies it back as the "chat_history"
# prompt variable — callers must NOT pass chat_history themselves.
memory = ConversationBufferMemory(memory_key="chat_history")

llm = ChatOpenAI(
    temperature=0.5,
    model_name="sarvamai/sarvam-m:free",  # free-tier model served via OpenRouter
    openai_api_base="https://openrouter.ai/api/v1",
    openai_api_key=os.getenv("OPENAI_API_KEY"),
    verbose=True,
)

llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    memory=memory,
)
def get_text_response(user_message, history):
    """Return the chatbot's reply for one gr.ChatInterface turn.

    Parameters
    ----------
    user_message : str
        The latest message typed by the user.
    history : list
        Gradio-managed chat history; intentionally ignored, because the
        chain's ConversationBufferMemory already tracks the conversation
        and injects it as the ``chat_history`` prompt variable.
    """
    # Fix: the original also passed chat_history=llm_chain.memory.buffer,
    # which collides with the memory-supplied "chat_history" input key and
    # makes LangChain raise at runtime (the Space's "Runtime error").
    return llm_chain.predict(user_message=user_message)
# Chat UI: one input box, conversation pane, and clickable example prompts.
# (Stray "- " markdown-bullet prefix removed from the first example.)
demo = gr.ChatInterface(
    get_text_response,
    examples=[
        "What programming language do you find the most fascinating, and why?",
        "Which tech gadget do you consider a game-changer for the industry?",
        "Tell me about a challenging coding problem you solved recently.",
    ],
)

if __name__ == "__main__":
    # To create a public link, set share=True in launch().
    # To surface errors and logs, set debug=True in launch().
    demo.launch()