Spaces:
No application file
Delete app.py
Browse files
app.py
DELETED
@@ -1,65 +0,0 @@
|
|
1 |
-
# Import statements
import os

from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain.prompts import (
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
    ChatPromptTemplate,
    MessagesPlaceholder
)
import streamlit as st
from streamlit_chat import message
from utils import *  # provides get_conversation_string, query_refiner, find_match

# Streamlit setup
st.subheader("Legal Guardian")

# Session state initialization — seed the assistant greeting once per session.
if 'responses' not in st.session_state:
    st.session_state['responses'] = ["How can I assist you?"]

if 'requests' not in st.session_state:
    st.session_state['requests'] = []

if 'buffer_memory' not in st.session_state:
    # k=3: keep only the last three exchanges so the prompt stays short.
    st.session_state.buffer_memory = ConversationBufferWindowMemory(k=3, return_messages=True)

# SECURITY FIX: the original committed a literal OpenAI API key ("sk-...")
# into source control. Read the key from the environment instead and stop
# the app with a clear message when it is absent.
openai_api_key = os.environ.get("OPENAI_API_KEY", "")
if not openai_api_key:
    st.error("OPENAI_API_KEY environment variable is not set.")
    st.stop()

# Initialize ChatOpenAI and conversation
llm = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key)

system_msg_template = SystemMessagePromptTemplate.from_template("""
Legal Guardian' is a GPT designed to assist with a broad range of legal questions related to children's issues, focusing on laws in India...
...It asks for clarification on vague questions to ensure accurate and relevant responses, and treats each query independently for focused assistance.'
""")

human_msg_template = HumanMessagePromptTemplate.from_template("{input}")
# Prompt order: system instructions, prior history (from buffer memory), then
# the current user turn.
prompt_template = ChatPromptTemplate.from_messages([system_msg_template, MessagesPlaceholder(variable_name="history"), human_msg_template])
conversation = ConversationChain(memory=st.session_state.buffer_memory, prompt=prompt_template, llm=llm, verbose=True)

# Streamlit UI components
response_container = st.container()
textcontainer = st.container()

# Handle user input and display conversation
with textcontainer:
    query = st.text_input("Query: ", key="input")
    # ROBUSTNESS FIX: ignore the Submit click when the query box is empty
    # (the original refined and answered an empty string).
    if st.button("Submit") and query:
        with st.spinner("typing..."):
            conversation_string = get_conversation_string()
            # Rewrite the raw query against the running conversation so the
            # retrieval step (find_match) gets a self-contained question.
            refined_query = query_refiner(conversation_string, query)
            st.subheader("Refined Query:")
            st.write(refined_query)
            context = find_match(refined_query)
            response = conversation.predict(input=f"Context:\n {context} \n\n Query:\n{query}")
        st.session_state.requests.append(query)
        st.session_state.responses.append(response)

# Display conversation history — responses lead requests by one (the seeded
# greeting), hence the bounds check before rendering the paired user message.
with response_container:
    if st.session_state['responses']:
        st.subheader("Chat History:")
        for i in range(len(st.session_state['responses'])):
            message(st.session_state['responses'][i], key=str(i))
            if i < len(st.session_state['requests']):
                message(st.session_state["requests"][i], is_user=True, key=str(i) + '_user')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|