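"""Streamlit chat app that turns LeetCode problems into real-world interview
scenarios via the OpenAI chat completions API."""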
import os

import streamlit as st
from openai import OpenAI

# Set up the OpenAI client
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

st.title("LeetCode to Real-World Interview Question Generator")

# Load the system prompt from the file
with open("prompt.txt", "r") as file:
    system_prompt = file.read()
# Initialize chat history with the system prompt
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "system", "content": system_prompt}]
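
# Note: st.session_state persists across Streamlit reruns within a session,
# so the conversation history survives each rerun of this script.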

# Display chat messages from history on app rerun
for message in st.session_state.messages[1:]:  # Skip the system message
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Generate a response by sending the full chat history to the OpenAI API
def generate_response():
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=st.session_state.messages,
    )
    return response.choices[0].message.content

# React to user input
if prompt := st.chat_input("Enter a LeetCode question to transform:"):
    # Display user message in chat message container
    st.chat_message("user").markdown(prompt)
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Generate response (the user message is already the last entry in the history)
    response = generate_response()

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        st.markdown(response)
    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})

st.sidebar.markdown("""
## About
This is a LeetCode to Real-World Interview Question Generator powered by OpenAI's GPT-4o.
Enter a LeetCode question to transform it into a real-world interview scenario!
""")