# Streamlit demo app (hosted on Hugging Face Spaces).
# Setup: pip install langchain streamlit
import os

import streamlit as st
from langchain.llms import OpenAI

# Optionally load environment variables from a .env file (requires python-dotenv):
# from dotenv import load_dotenv
# load_dotenv()  # take environment variables from .env
def get_openai_response(question):
    """Send *question* to the OpenAI completions model and return its reply.

    Parameters
    ----------
    question : str
        Prompt text forwarded verbatim to the model.

    Returns
    -------
    str
        The model's completion text.
    """
    llm = OpenAI(
        # NOTE(review): env var is named "OPEN_API_KEY" here; the conventional
        # name is "OPENAI_API_KEY" — confirm which one is actually set before
        # renaming, since the app reads whatever the deployment exports.
        openai_api_key=os.getenv("OPEN_API_KEY"),
        model_name="gpt-3.5-turbo-instruct",
        temperature=0.6,
    )
    response = llm(question)
    return response
# TODO: modify with chain and other stuff

## Streamlit app
st.set_page_config(page_title="Trail Demo")
st.header("Sample")
st.write("UPDATE: This app uses the 'gpt-3.5-turbo-instruct' model through Langchain")

uploaded_file = st.file_uploader('Choose your .pdf file', type="pdf")
submit = st.button("Generate")

if submit:
    if uploaded_file is None:
        # Guard: the original referenced `response` unconditionally, raising
        # NameError when "Generate" was clicked before any file was uploaded.
        st.warning("Please upload a PDF file first.")
    else:
        # NOTE(review): extract_data is not defined or imported anywhere in
        # this file — it must be supplied (e.g. a PDF-text extractor) or the
        # app fails with NameError here.
        df = extract_data(uploaded_file)
        # Call the model only after the button is pressed; the original ran
        # the (paid) LLM call on every Streamlit rerun once a file existed.
        response = get_openai_response(df)
        st.subheader("The response is")
        st.write(response)