adinarayana committed (verified)
Commit f76e3c0 · 1 Parent(s): 53eaaf2

Update app.py

Files changed (1)
  1. app.py +23 -60
app.py CHANGED
@@ -1,8 +1,12 @@
-from openai import OpenAI
-from langchain import OpenAI
+from transformers import pipeline
+
+import streamlit as st
+
 
-# from transformers import pipeline
 
+from langchain.llms import OpenAI
+
+# from dotenv import load_dotenv
 import os
 
 # take environment variables from .env
@@ -14,73 +18,32 @@ import streamlit as st
 
 
 def get_openai_response(question):
-    llm = OpenAI(
-        openai_api_key=os.getenv("OPEN_API_KEY"),
-        model_name="gpt-3.5-turbo-instruct",
-        temperature=0.6,
-    )
-    response = llm(question)
+    # llm = OpenAI(
+    #     openai_api_key=os.getenv("OPEN_API_KEY"),
+    #     model_name="gpt-3.5-turbo-instruct",
+    #     temperature=0.6,
+    # )
+    response = pipeline("summarization", model="stevhliu/my_awesome_billsum_model")
+    # response = llm(question)
     return response
 # modify with chain and other stuff
 
 
 ## streamlit app
 
-st.set_page_config(page_title="QandA Demo")
-st.header("QandA - Langchain Application")
-st.write("UPDATE: This app uses the 'gpt-3.5-turbo-instruct' model through Langchain to answer queries")
+st.set_page_config(page_title="Trail Demo")
+st.header("Sample")
+st.write("UPDATE: This app uses the 'gpt-3.5-turbo-instruct' model through Langchain")
 
 
-input = st.text_input("Enter your query: ", key=input)
-response = get_openai_response(input)
+# input = st.text_input("Enter your query: ", key=input)
+uploaded_file = st.file_uploader('Choose your .pdf file', type="pdf")
+if uploaded_file is not None:
+    df = extract_data(uploaded_file)
+    response = get_openai_response(df)
 
 
 submit = st.button("Generate")
 if submit:
     st.subheader("The response is")
-    st.write(response)
-
-# pip install langchain
-
-# from langchain.llms import OpenAI
-
-# # from dotenv import load_dotenv
-# import os
-
-# # take environment variables from .env
-# # load_dotenv()
-
-# import streamlit as st
-
-# # load OpenAI model and get a response
-
-
-# def get_openai_response(question):
-#     llm = OpenAI(
-#         openai_api_key=os.getenv("OPEN_API_KEY"),
-#         model_name="gpt-3.5-turbo-instruct",
-#         temperature=0.6,
-#     )
-#     response = llm(question)
-#     return response
-# # modify with chain and other stuff
-
-
-# ## streamlit app
-
-# st.set_page_config(page_title="Trail Demo")
-# st.header("Sample")
-# st.write("UPDATE: This app uses the 'gpt-3.5-turbo-instruct' model through Langchain")
-
-
-# # input = st.text_input("Enter your query: ", key=input)
-# uploaded_file = st.file_uploader('Choose your .pdf file', type="pdf")
-# if uploaded_file is not None:
-#     df = extract_data(uploaded_file)
-#     response = get_openai_response(df)
-
-
-# submit = st.button("Generate")
-# if submit:
-#     st.subheader("The response is")
-#     st.write(response)
+    st.write(response)
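
Note on the new get_openai_response: the committed version constructs the summarization pipeline but returns the pipeline object itself, and question is never passed to it. A minimal sketch of how the function would typically be written, assuming Streamlit's st.cache_resource is available to cache the model across reruns (the model name is the one from the diff above; everything else is an assumption, not the committed code):

    # Sketch only, not part of this commit: load the pipeline once, call it on the
    # input text, and return the generated summary string.
    import streamlit as st
    from transformers import pipeline


    @st.cache_resource  # assumes Streamlit >= 1.18; avoids reloading the model on every rerun
    def load_summarizer():
        return pipeline("summarization", model="stevhliu/my_awesome_billsum_model")


    def get_openai_response(question):
        summarizer = load_summarizer()
        # the pipeline returns a list like [{"summary_text": "..."}]
        result = summarizer(question, max_length=128, min_length=16, do_sample=False)
        return result[0]["summary_text"]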
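The upload flow also calls extract_data, which is not defined anywhere in app.py. One possible sketch, assuming pypdf is installed (the helper name comes from the diff; this implementation is an assumption):

    # Hypothetical helper, not part of this commit: pull plain text out of the
    # uploaded PDF so it can be passed to get_openai_response.
    from pypdf import PdfReader


    def extract_data(uploaded_file):
        reader = PdfReader(uploaded_file)  # Streamlit's UploadedFile is file-like
        # concatenate the text of every page into one string
        return "\n".join(page.extract_text() or "" for page in reader.pages)

As committed, st.write(response) can also raise a NameError when the Generate button is pressed without a file, since response is only assigned inside the if uploaded_file is not None: branch.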