Update appDeepseekCoder.py
appDeepseekCoder.py CHANGED (+0 -66)
@@ -1,66 +0,0 @@
-from fastapi import FastAPI, Request
-from fastapi.responses import JSONResponse
-from langchain_openai import ChatOpenAI
-from langchain.chains import LLMChain
-from prompts import maths_assistant_prompt_template
-from langchain.memory.buffer import ConversationBufferMemory
-from dotenv import load_dotenv
-import os
-import chainlit as cl
-import uvicorn
-
-# Load environment variables from .env file
-load_dotenv()
-
-api_key = os.getenv('OPENAI_API_KEY')
-print(f"api key is {api_key}")
-
-app = FastAPI()
-
-@app.on_event("startup")
-async def startup_event():
-    print("Initializing llm...")
-    llm = ChatOpenAI(model='gpt-4o-mini', temperature=0.5, api_key=api_key)
-    print("llm initialized!")
-    conversation_memory = ConversationBufferMemory(memory_key="chat_history", max_len=50, return_messages=True)
-    llm_chain = LLMChain(llm=llm, prompt=maths_assistant_prompt_template, memory=conversation_memory)
-
-    # Initialize Chainlit context
-
-    cl.user_session.set("llm_chain", llm_chain)
-
-@app.post("/query/")
-async def query_llm(request: Request):
-    data = await request.json()
-    message = data.get("message")
-    llm_chain = cl.user_session.get("llm_chain")
-    response = await llm_chain.ainvoke({
-        "chat_history": llm_chain.memory.load_memory_variables({})["chat_history"],
-        "question": message
-    }, callbacks=[cl.AsyncLangchainCallbackHandler()])
-    return JSONResponse(content={"response": response["text"]})
-
-@cl.on_chat_start
-async def on_chat_start():
-    actions = [
-        cl.Action(name="Probability", value="Probability", description="Select Quiz Topic!"),
-        cl.Action(name="Linear Algebra", value="Linear Algebra", description="Select Quiz Topic!"),
-        cl.Action(name="Accounts", value="Accounts", description="Select Quiz Topic!"),
-        cl.Action(name="Calculus", value="Calculus", description="Select Quiz Topic!")
-    ]
-    await cl.Message(content="**Pick a Topic and Let the Quiz Adventure Begin!** ππ", actions=actions).send()
-
-@cl.action_callback("Linear Algebra")
-@cl.action_callback("Probability")
-@cl.action_callback("Accounts")
-@cl.action_callback("Calculus")
-async def on_action(action: cl.Action):
-    llm_chain = cl.user_session.get("llm_chain")
-    response = await llm_chain.ainvoke({
-        "chat_history": llm_chain.memory.load_memory_variables({})["chat_history"],
-        "question": f"Quiz me on the topic {action.value}."
-    }, callbacks=[cl.AsyncLangchainCallbackHandler()])
-    await cl.Message(response["text"]).send()
-
-if __name__ == "__main__":
-    uvicorn.run(app, host="0.0.0.0", port=7860)
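For reference only, a minimal sketch of how the same chain setup is usually wired in a Chainlit-only deployment: the chain is built per chat session inside Chainlit's own lifecycle hooks, so cl.user_session is only touched while a session is active, rather than in a FastAPI startup hook as in the removed file. The prompts.maths_assistant_prompt_template import is assumed to exist as in the original, and the on_message handler below is a hypothetical stand-in for the deleted /query/ endpoint, not the author's actual replacement.

import os

import chainlit as cl
from dotenv import load_dotenv
from langchain.chains import LLMChain
from langchain.memory.buffer import ConversationBufferMemory
from langchain_openai import ChatOpenAI

from prompts import maths_assistant_prompt_template  # assumed local module, as in the removed file

load_dotenv()


@cl.on_chat_start
async def on_chat_start():
    # Build one chain per chat session and keep it in the Chainlit session store,
    # instead of creating it once in a FastAPI startup event.
    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.5, api_key=os.getenv("OPENAI_API_KEY"))
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    llm_chain = LLMChain(llm=llm, prompt=maths_assistant_prompt_template, memory=memory)
    cl.user_session.set("llm_chain", llm_chain)


@cl.on_message
async def on_message(message: cl.Message):
    # Hypothetical stand-in for the deleted /query/ endpoint: the attached memory
    # supplies chat_history automatically, so only the question is passed in.
    llm_chain = cl.user_session.get("llm_chain")
    response = await llm_chain.ainvoke(
        {"question": message.content},
        callbacks=[cl.AsyncLangchainCallbackHandler()],
    )
    await cl.Message(content=response["text"]).send()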