Update app.py
app.py CHANGED
@@ -28,7 +28,7 @@ model = SentenceTransformer("all-MiniLM-L6-v2").to(device)
 with open("prompt.txt", "r") as file:
     system_prompt = file.read()
 
-st.title("
+st.title("Real-World Programming Question Mock Interview")
 
 # Initialize chat history
 if "messages" not in st.session_state:
@@ -39,8 +39,6 @@ for message in st.session_state.messages[1:]:  # Skip the system message
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
-from sklearn.metrics.pairwise import cosine_similarity
-
 # Function to find the top 1 most similar question based on user input
 def find_top_question(query):
     # Generate embedding for the query
@@ -105,6 +103,6 @@ if prompt := st.chat_input("Enter a LeetCode-related query (e.g., 'google backtr
 
 st.sidebar.markdown("""
 ## About
-This is a
-Enter a
+This is a Real-World Interview Question Generator powered by OpenAI's API.
+Enter a company name, topic and level of difficulty, and it will transform a relevant question into a real-world interview scenario!
 """)
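The second hunk drops the `from sklearn.metrics.pairwise import cosine_similarity` import but keeps `find_top_question`, whose body is cut off after its first comment. For context only, here is a minimal sketch of how such a helper can work with the SentenceTransformer model loaded at the top of app.py; the `questions` list, the precomputed `question_embeddings`, and the use of `sentence_transformers.util.cos_sim` in place of the removed scikit-learn call are assumptions for illustration, not code from this commit.

```python
# Sketch only: `questions` and `question_embeddings` are hypothetical stand-ins
# for whatever data the app actually indexes; they do not appear in this diff.
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("all-MiniLM-L6-v2")  # same model name as in app.py

questions = [
    "Two Sum",
    "Generate Parentheses (backtracking)",
    "Word Ladder (BFS)",
]
# Precompute one embedding per candidate question as a torch tensor.
question_embeddings = model.encode(questions, convert_to_tensor=True)

def find_top_question(query):
    # Generate embedding for the query
    query_embedding = model.encode(query, convert_to_tensor=True)
    # Cosine similarity of the query against every candidate; util.cos_sim
    # stands in here for the removed sklearn cosine_similarity call.
    scores = util.cos_sim(query_embedding, question_embeddings)[0]
    best = int(scores.argmax())
    return questions[best], float(scores[best])

print(find_top_question("google backtracking medium"))
```

Keeping the similarity computation in torch via `util.cos_sim` would avoid the scikit-learn dependency entirely, which is consistent with the removed import, but the replacement actually used by the app is not visible in this diff.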