# VRai98's picture
# Update app.py
# 6be7291 verified
import ast
import json
import os

import gradio as gr
import requests
from langchain.output_parsers import PydanticOutputParser
from pydantic import BaseModel, Field

from existing_solution import *
from models import *
from workflow import app_graph
class RouterResponse_1(BaseModel):
    """Structured output of the router chain: which user-data keys matter."""

    route: list[str] = Field(
        description=("A list of keys relevant to the user's query")
    )
class SummaryResponse_1(BaseModel):
    """Structured output of the summary chain: the condensed answer text."""

    information: str = Field(
        description=("Condensed information based on the context provided")
    )
# Parsers that convert raw LLM text into the Pydantic models above; their
# get_format_instructions() output is injected into the chain prompts.
route_op_1=PydanticOutputParser(pydantic_object=RouterResponse_1)
summary_op_1=PydanticOutputParser(pydantic_object=SummaryResponse_1)
async def solution_langchain(query, prev_msgs, json_data):
    """Route the query to the relevant keys of *json_data*, then summarize.

    Args:
        query: The user's question.
        prev_msgs: Prior conversation messages, forwarded to both chains.
        json_data: Mapping of user-data keys to their values.

    Returns:
        The condensed information string from the summary chain, or
        "Nothing" when the router selects no keys.
    """
    response = await router_chain_1.ainvoke({
        "query": query,
        "previous_messages": prev_msgs,
        "format_instructions": route_op_1.get_format_instructions(),
    })
    routes = route_op_1.parse(response.content).route
    print(routes)
    if not routes:
        return "Nothing"
    # Keep only routes that actually exist in the data: the LLM may return
    # hallucinated keys, and a blind json_data[key] lookup would KeyError.
    result = {key: json_data[key] for key in routes if key in json_data}
    print(result)
    response = await summary_chain_1.ainvoke({
        "query": query,
        "data": json.dumps(result),
        "previous_messages": prev_msgs,
        "format_instructions": summary_op_1.get_format_instructions(),
    })
    return summary_op_1.parse(response.content).information
def _ask_personality(message):
    """POST *message* to the personality service's /chat endpoint.

    Returns the parsed JSON response body; raises for HTTP errors or if
    PERSONALITY_URL is unset (os.getenv would return None).
    """
    url = os.getenv("PERSONALITY_URL") + "/chat"
    payload = {
        "message": message,
        "personality": 'humanish'
    }
    # Timeout so a hung personality service cannot block the UI forever.
    response = requests.post(url, json=payload, timeout=60)
    response.raise_for_status()
    return response.json()

async def process_inputs(input_string, uploaded_file):
    """Run the three extraction pipelines over the uploaded user data.

    Args:
        input_string: The user's query.
        uploaded_file: Filepath of the uploaded JSON user-data file.

    Returns:
        Tuple of three response strings: LangChain solution, LangGraph
        solution, and the existing in-process solution.

    Raises:
        Exception: If no file was uploaded or it is not valid JSON.
    """
    if uploaded_file is None:
        raise Exception("User data Needed")
    try:
        with open(uploaded_file) as f:
            file_content = json.load(f)
    except Exception as e:
        # Previously this error was only printed, leaving file_content
        # unbound and crashing later with a confusing NameError.
        print(e)
        raise Exception("User data Needed") from e

    input_list = []
    inputs = {
        "query": input_string,
        "previous_msgs": input_list,
        "ui_data": file_content,
        'information': [],
    }
    extracted_1 = await solution_langchain(
        query=input_string, prev_msgs=input_list, json_data=file_content
    )
    final_state = await app_graph.ainvoke(inputs)
    extracted_2 = final_state['information']

    separator = "===" * 50
    print(separator)
    print("LangChain Solution CHATGPT 1\n", extracted_1)
    print(separator)
    print("LangGraph Solution CHATGPT 2\n", extracted_2)
    print(separator)

    # The two personality-service calls differed only in the extracted
    # information; share one helper instead of duplicating the POST logic.
    answer_1 = _ask_personality(
        RESPONSE_PROMPT.format(query=input_string, user_information=extracted_1)
    )
    answer_2 = _ask_personality(
        RESPONSE_PROMPT.format(query=input_string, user_information=extracted_2)
    )

    # Existing in-process solution, for side-by-side comparison.
    messages = [
        ChatMessage(role="user", content=input_string),]
    # Create a ChatRequest object
    request = ChatRequest(
        messages=messages,
        user_preferences=file_content,
        personality="humanish"
    )
    # Call the chat endpoint asynchronously
    response_3 = await chat_endpoint(request)
    return answer_1["response"], answer_2["response"], response_3.response
# Gradio front-end: a query textbox plus a JSON user-data upload, producing
# three text outputs that compare the LangChain, LangGraph and existing
# solutions side by side.
demo_inputs = [
    gr.Textbox(label="Enter a string"),
    gr.File(label="Upload a JSON file", type="filepath"),
]
demo_outputs = [
    gr.Textbox(label="Solution 1 Langchain"),
    gr.Textbox(label="Solution 2 Langgraph"),
    gr.Textbox(label="Existing Solution"),
]
interface = gr.Interface(
    fn=process_inputs,
    inputs=demo_inputs,
    outputs=demo_outputs,
    title="Extracting Relevant UI",
    description="Provide a query, previous messages and user_data. Make sure in user data these keys are present :['name', 'age', 'gender', 'preferences', 'personalInformation', 'relatedDemographics', 'history', 'painPoints', 'inefficienciesOrQualityOfLifeImprovements', 'additionalNotes']"
)
interface.launch()