import streamlit as st
from st_pages import Page, show_pages
from openai import OpenAI
from whisper_stt import whisper_stt
# Set page configuration
st.set_page_config(layout="wide")
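# Register the app's single page with st_pages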
show_pages([Page("app.py", "Home", "🏠")])
# Custom CSS for expander header
st.markdown("""
<style>
.streamlit-expanderHeader {
font-size: 24px !important;
font-weight: bold !important;
color: #1E90FF !important;
}
</style>
""", unsafe_allow_html=True)
# JavaScript to preserve scroll position
st.markdown("""
<script>
window.addEventListener("load", function() {
if (window.location.hash) {
var hash = window.location.hash.substring(1);
if (hash && document.getElementById(hash)) {
document.getElementById(hash).scrollIntoView();
}
}
});
document.querySelectorAll('a').forEach(anchor => {
anchor.addEventListener('click', function (e) {
e.preventDefault();
var targetId = this.getAttribute('href').substring(1);
var targetElement = document.getElementById(targetId);
if (targetElement) {
targetElement.scrollIntoView({ behavior: 'smooth' });
}
window.location.hash = targetId;
});
});
</script>
""", unsafe_allow_html=True)
# Initialize session state variables
if 'paused' not in st.session_state:
    st.session_state.paused = False
if 'question_text' not in st.session_state:
    st.session_state.question_text = ""
if 'submitted' not in st.session_state:
    st.session_state.submitted = False
if 'response_content' not in st.session_state:
    st.session_state.response_content = ""
if 'stopped' not in st.session_state:
    st.session_state.stopped = False
if 'function_call_count' not in st.session_state:
    st.session_state.function_call_count = 0
if 'transcribed_text' not in st.session_state:
    st.session_state.transcribed_text = ""
if 'last_processed_text' not in st.session_state:
    st.session_state.last_processed_text = ""
if 'headers' not in st.session_state:
    st.session_state.headers = []
if 'history' not in st.session_state:
    st.session_state.history = []
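# Callback for the Stop button: flag that streaming should halt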
def on_stop():
    st.session_state.stopped = True
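# on_change callback for the question text area; logs the current input for debugging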
def handle_enter(key):
    if key == "ctrl+enter":
        new_question = st.session_state.question_input
        print(f"handle_enter called. new_question: '{new_question}'")
        print(f"session state: {st.session_state}")
with st.sidebar:
    api_key = st.text_input("API Key", key="chatbot_api_key", type="password")

col1, col2 = st.columns(2)
with col1:
    # Call whisper_stt without a callback
    transcribed_text = whisper_stt(
        openai_api_key=api_key,
        language='en'
    )
    if transcribed_text:
        st.session_state.question_text = transcribed_text
    # Check if new transcription is available
    if transcribed_text and transcribed_text != st.session_state.transcribed_text:
        st.session_state.transcribed_text = transcribed_text
        st.session_state.question_text = transcribed_text
        st.session_state.submitted = True
with col2:
    st.button(label='Stop', on_click=on_stop)
# Create an input for the question and use new_question directly
new_question = st.text_area(
    "Question",
    value=st.session_state.question_text or "",
    height=150,
    key="question_input",
    on_change=handle_enter,
    args=("ctrl+enter",)
)
print(f"After text_area, new_question: '{new_question}'")
# Check if new_question has changed and is not empty
if new_question and new_question != st.session_state.question_text:
    st.session_state.question_text = new_question
    st.session_state.submitted = True
if st.session_state.question_text and not api_key:
    st.info("Please add your OpenAI API key to continue.")
    st.stop()
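# Stream a completion for the submitted question, unless the user has stopped it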
if st.session_state.submitted and not st.session_state.stopped:
    st.session_state.headers.append(st.session_state.question_text)
    client = OpenAI(api_key=api_key)
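    # Single-turn request: only the current question is sent to the model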
    st.session_state.messages = [{"role": "user", "content": st.session_state.question_text}]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=st.session_state.messages,
        stream=True
    )
    complete_response = ""
    current_expander = st.expander(st.session_state.question_text, expanded=True)
    response_placeholder = current_expander.empty()
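    # Stream response chunks into the placeholder; pressing Stop breaks out mid-response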
    for chunk in response:
        if st.session_state.stopped:
            st.session_state.stopped = False
            st.session_state.submitted = False
            break
        else:
            if chunk and chunk.choices[0].delta.content:
                complete_response += chunk.choices[0].delta.content
                response_placeholder.markdown(complete_response, unsafe_allow_html=True)
    st.session_state.response_content = complete_response
    st.session_state.history.insert(0, {
        'question': st.session_state.question_text,
        'response': complete_response
    })
    st.session_state.submitted = False
    st.session_state.stopped = False
# Display all questions and answers
for idx, entry in enumerate(st.session_state.history):
    if idx == 0 and st.session_state.response_content:
        continue  # Skip the first item since it's already shown as the current expander
    with st.expander(entry['question'], expanded=False):
        st.markdown(entry['response'], unsafe_allow_html=True)