Update app.py
app.py CHANGED
@@ -1,9 +1,9 @@
 import streamlit as st
-import openai
+from openai import OpenAI
 import os
 
-# Set up OpenAI
-openai.api_key = os.getenv("OPENAI_API_KEY")
+# Set up OpenAI client
+client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
 
 st.title("LeetCode to Real-World Interview Question Generator")
 
@@ -22,13 +22,13 @@ for message in st.session_state.messages[1:]: # Skip the system message
 
 # Function to generate response using OpenAI API
 def generate_response(prompt):
-    response = openai.ChatCompletion.create(
+    response = client.chat.completions.create(
         model="gpt-4",
         messages=st.session_state.messages + [{"role": "user", "content": prompt}],
         max_tokens=1000,
         temperature=0.7,
     )
-    return response.choices[0].message
+    return response.choices[0].message.content
 
 # React to user input
 if prompt := st.chat_input("Enter a LeetCode question to transform:"):
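
For context, here is a minimal sketch of how the migrated generate_response() can sit inside a complete Streamlit chat loop. Only the client setup and the body of generate_response() mirror the updated app.py above; the session-state initialization, the system prompt text, and the st.chat_message display/append logic are illustrative assumptions about the rest of the file, which is outside this diff.

import os

import streamlit as st
from openai import OpenAI

# Client setup as in the updated app.py: the key is read from the environment.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

st.title("LeetCode to Real-World Interview Question Generator")

# Illustrative assumption: a system message seeds the conversation history.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "system", "content": "Rewrite LeetCode problems as real-world interview questions."}
    ]

def generate_response(prompt):
    # chat.completions.create is the OpenAI Python SDK >= 1.0 call that
    # replaces the pre-1.0 openai.ChatCompletion.create interface.
    response = client.chat.completions.create(
        model="gpt-4",
        messages=st.session_state.messages + [{"role": "user", "content": prompt}],
        max_tokens=1000,
        temperature=0.7,
    )
    # In the 1.0 SDK, message is an object, so .content extracts the text.
    return response.choices[0].message.content

# React to user input (the display/append pattern below is assumed, not from this commit).
if prompt := st.chat_input("Enter a LeetCode question to transform:"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    reply = generate_response(prompt)
    st.session_state.messages.append({"role": "assistant", "content": reply})
    with st.chat_message("assistant"):
        st.markdown(reply)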