"""GradGPT: a Streamlit app that drafts graduate-school motivational letters
with ChatGPT, using an uploaded .docx file as the knowledge base."""

import os

import docx2txt
from dotenv import load_dotenv

from langchain.chat_models import ChatOpenAI
from langchain.schema import (
    SystemMessage,
    HumanMessage,
    AIMessage
)
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.callbacks.base import BaseCallbackHandler

import streamlit as st

# Load environment variables (e.g. the OpenAI API key) from a local .env file.
load_dotenv()

class StreamHandler(BaseCallbackHandler):
    """Streams LLM tokens into a Streamlit container as they arrive."""

    def __init__(self, container, initial_text=""):
        self.container = container
        self.text = initial_text

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # Append each new token and re-render the accumulated text.
        self.text += token
        self.container.markdown(self.text)

def init_gpt(gpt_model, stream_handler):
    # Build the chat model as a module-level global so generate_content can use it.
    global llm
    llm = ChatOpenAI(
        temperature=0.3,
        model=gpt_model,
        streaming=True,
        callbacks=[stream_handler]
    )

# Embeddings client; instantiated here but not used in the current chat flow.
embeddings = OpenAIEmbeddings()

def generate_content(query, knowledge_base):
    system_prompt = f"""You are a professional writer of motivational letters. \
You will be given content from a knowledge base below, delimited by triple \
backticks. Your job is to use knowledge from this data and write a \
motivational letter for a graduate school application. Only write content \
using data from the knowledge base; do not claim facts from outside of it. \
Make the letter very personal with regard to the knowledge base.

Knowledge Base: ```{knowledge_base}```
"""

    # Rebuild the conversation: even indices are user turns, odd indices are
    # previous assistant replies.
    messages = [SystemMessage(content=system_prompt)]
    for i in range(len(query)):
        if i % 2 == 0:
            temp_query = HumanMessage(content=query[i]['content'])
        else:
            temp_query = AIMessage(content=query[i]['content'])
        messages.append(temp_query)

    response = llm(messages)
    return response.content

def main():
    st.title("GradGPT 🤖")
    st.header("ChatGPT-Powered Motivational Letter Writer")

    uploaded_file = st.file_uploader("Upload a Word file", type="docx")
    knowledge_base = ""
    if uploaded_file is not None:
        # Extract the raw text from the uploaded .docx to use as the knowledge base.
        knowledge_base = docx2txt.process(uploaded_file)

    # Keep the chat history across Streamlit reruns.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if prompt := st.chat_input("Enter your queries here."):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            # Stream the reply token by token into an empty placeholder.
            stream_handler = StreamHandler(st.empty())
            init_gpt("gpt-3.5-turbo-16k", stream_handler)
            content = generate_content(
                st.session_state.messages, knowledge_base
            )
            st.session_state.messages.append(
                {"role": "assistant", "content": content}
            )

    with st.sidebar:
        # Drop the most recent user/assistant exchange from the history.
        if st.button("Remove previous message"):
            if len(st.session_state.messages) >= 2:
                st.session_state.messages = st.session_state.messages[:-2]

if __name__ == '__main__':
    main()