# Streamlit chat application with selectable AI backends (GPT-4, Claude,
# Mistral) and optional PDF/DOCX document context.
import io
import os
from datetime import datetime

import docx
import openai
import requests
import streamlit as st
from PyPDF2 import PdfReader
# One-time initialisation of the Streamlit session state: each key is only
# seeded when missing so values survive reruns.
for _key, _default in (
    ("chat_histories", {}),   # chat_id -> list of {"role", "content"} dicts
    ("chat_names", {}),       # chat_id -> display name
    ("chat_models", {}),      # chat_id -> "gpt" | "claude" | "mistral" | None
    ("current_chat_id", None),
    ("selected_model", None),
    ("document_text", None),  # extracted text of the uploaded document
    ("editing_chat_id", None),
):
    if _key not in st.session_state:
        st.session_state[_key] = _default
# --- Document reading helpers -----------------------------------------------
def read_pdf(file):
    """Extract and newline-join the text of every page in a PDF file-like object."""
    pages = PdfReader(file).pages
    return "\n".join(page.extract_text() for page in pages)
def read_docx(file):
    """Return the text of all paragraphs in a .docx file-like object, newline-joined."""
    document = docx.Document(file)
    parts = [paragraph.text for paragraph in document.paragraphs]
    return "\n".join(parts)
def process_document(uploaded_file):
    """Extract text from an uploaded PDF or DOCX file.

    Returns None when no file was given, the extracted text on success, and a
    Danish error/notice string for unsupported formats or read failures.
    """
    if not uploaded_file:
        return None
    try:
        file_bytes = uploaded_file.getvalue()
        mime = uploaded_file.type
        if mime == "application/pdf":
            return read_pdf(io.BytesIO(file_bytes))
        if mime == "application/vnd.openxmlformats-officedocument.wordprocessingml.document":
            return read_docx(io.BytesIO(file_bytes))
        return "Ikke-understøttet filformat. Upload venligst PDF eller DOCX fil."
    except Exception as e:
        # Report read errors to the UI instead of crashing the app.
        return f"Fejl ved læsning af fil: {str(e)}"
def chat_with_gpt(chat_id, prompt):
    """Send the stored chat history plus *prompt* to OpenAI GPT-4.

    Returns the assistant's reply text, or a Danish error message string if
    the API call fails — matching chat_with_claude / chat_with_mistral.
    """
    client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    messages = [{"role": msg["role"], "content": msg["content"]}
                for msg in st.session_state.chat_histories[chat_id]]
    messages.append({"role": "user", "content": prompt})
    try:
        response = client.chat.completions.create(
            model="gpt-4",
            messages=messages
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        # Fix: the other chat_with_* helpers catch failures and return this
        # message; previously an API error here crashed the Streamlit script.
        return f"Der opstod en fejl: {str(e)}"
def chat_with_claude(chat_id, prompt):
    """Send the stored chat history plus *prompt* to Anthropic Claude.

    Returns the assistant's reply text, or a Danish error message string if
    the HTTP request fails.
    """
    url = "https://api.anthropic.com/v1/messages"
    headers = {
        "x-api-key": os.getenv("ANTHROPIC_API_KEY"),
        "anthropic-version": "2023-06-01",
        "content-type": "application/json"
    }
    messages = [{"role": msg["role"], "content": msg["content"]}
                for msg in st.session_state.chat_histories[chat_id]]
    messages.append({"role": "user", "content": prompt})
    try:
        response = requests.post(
            url,
            headers=headers,
            json={
                "model": "claude-3-opus-20240229",
                "messages": messages,
                "max_tokens": 1024,
                "temperature": 0.7
            },
            # Fix: requests has no default timeout — without one a stalled
            # connection blocks the Streamlit script forever.
            timeout=60,
        )
        response.raise_for_status()
        return response.json()["content"][0]["text"].strip()
    except Exception as e:
        return f"Der opstod en fejl: {str(e)}"
# Mistral backend, reached through its OpenAI-compatible endpoint.
def chat_with_mistral(chat_id, prompt):
    """Send the stored chat history plus *prompt* to Mistral.

    Returns the assistant's reply text, or a Danish error message string if
    the API call fails — matching the other chat_with_* helpers.
    """
    client = openai.OpenAI(
        api_key=os.getenv("MISTRAL_API_KEY"),
        base_url="https://api.mistral.ai/v1",
    )
    history = st.session_state.chat_histories[chat_id]
    messages = [{"role": entry["role"], "content": entry["content"]} for entry in history]
    messages.append({"role": "user", "content": prompt})
    try:
        completion = client.chat.completions.create(
            model="mistral-large-latest",
            messages=messages,
            temperature=0.7,
            max_tokens=1024,
        )
        return completion.choices[0].message.content.strip()
    except Exception as e:
        return f"Der opstod en fejl: {str(e)}"
def create_new_chat():
    """Register a fresh, empty chat, make it current, and return its id."""
    chat_id = datetime.now().strftime("%Y%m%d_%H%M%S")
    state = st.session_state
    state.chat_histories[chat_id] = []
    state.chat_names[chat_id] = f"Chat {datetime.now().strftime('%d/%m %H:%M')}"
    state.chat_models[chat_id] = None
    # Reset per-chat selections so the new chat starts from the model picker.
    state.current_chat_id = chat_id
    state.selected_model = None
    state.document_text = None
    return chat_id
def delete_chat(chat_id):
    """Remove a chat and its bookkeeping; clear the selection if it was current."""
    state = st.session_state
    if chat_id not in state.chat_histories:
        return
    state.chat_histories.pop(chat_id)
    state.chat_names.pop(chat_id)
    state.chat_models.pop(chat_id)
    if state.current_chat_id == chat_id:
        state.current_chat_id = None
        state.selected_model = None
def rename_chat(chat_id, new_name):
    """Give an existing chat a new display name (no-op for unknown ids)."""
    names = st.session_state.chat_names
    if chat_id in names:
        names[chat_id] = new_name
def get_model_icon(model):
    """Map a model key ("gpt", "claude", "mistral") to its emoji; 💬 otherwise."""
    icons = {
        "gpt": "🌐",
        "claude": "🔅",
        "mistral": "🎗️",
    }
    return icons.get(model, "💬")
def format_chat_name(chat_id):
    """Return "<model icon> <chat name>" for the sidebar chat button label."""
    icon = get_model_icon(st.session_state.chat_models.get(chat_id))
    name = st.session_state.chat_names.get(chat_id, "Unavngivet chat")
    return f"{icon} {name}"
# --- Page chrome ------------------------------------------------------------
st.set_page_config(page_title="AI Chat", page_icon="🤖", layout="wide")

# Shared CSS for buttons, chat containers and headers, injected once per run.
_APP_CSS = """
<style>
.main {
    padding: 0rem 1rem;
}
.stButton button {
    width: 100%;
    border-radius: 20px;
    height: 45px;
    font-size: 16px;
}
.chat-container {
    background-color: #f8f9fa;
    border-radius: 10px;
    padding: 20px;
    margin: 10px 0;
}
.model-select {
    display: flex;
    gap: 10px;
    margin-bottom: 20px;
}
.chat-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    margin-bottom: 20px;
}
</style>
"""
st.markdown(_APP_CSS, unsafe_allow_html=True)
# --- Layout: narrow sidebar column (chat list + upload), wide main column ---
col_sidebar, col_main = st.columns([1, 3])

with col_sidebar:
    st.markdown("### 📱 Chat Oversigt")
    if st.button("➕✏️ Ny Chat", key="new_chat"):
        create_new_chat()
    st.markdown("---")

    # Chat history: one row per chat with select / rename / delete controls.
    st.markdown("### 🗃️ Tidligere Chats")
    for chat_id in list(st.session_state.chat_histories.keys()):
        col1, col2, col3 = st.columns([3, 1, 1])
        with col1:
            if st.session_state.editing_chat_id == chat_id:
                # Inline rename field replaces the select button while editing.
                new_name = st.text_input("Nyt navn",
                                         value=st.session_state.chat_names[chat_id],
                                         key=f"rename_{chat_id}")
                if st.button("Gem", key=f"save_{chat_id}"):
                    rename_chat(chat_id, new_name)
                    st.session_state.editing_chat_id = None
                    st.rerun()
            else:
                if st.button(format_chat_name(chat_id),
                             key=f"select_{chat_id}",
                             type="primary" if chat_id == st.session_state.current_chat_id else "secondary"):
                    st.session_state.current_chat_id = chat_id
                    # Fix: keep the global model selection in sync with the
                    # chat we switched to. Previously selected_model was left
                    # stale, so switching chats either re-showed the model
                    # picker or routed messages to the wrong backend.
                    st.session_state.selected_model = st.session_state.chat_models.get(chat_id)
        with col2:
            if st.button("✏️", key=f"edit_{chat_id}"):
                st.session_state.editing_chat_id = chat_id
                st.rerun()
        with col3:
            if st.button("🗑️", key=f"delete_{chat_id}"):
                delete_chat(chat_id)
                st.rerun()

    # Document upload: extracted text is stored in session state and later
    # prepended to prompts as context.
    st.markdown("### 📄 Upload Dokument")
    uploaded_file = st.file_uploader("", type=['pdf', 'docx'])
    if uploaded_file:
        document_text = process_document(uploaded_file)
        if document_text:
            st.session_state.document_text = document_text
            st.success("✅ Dokument uploadet")
            with st.expander("📄 Se dokument"):
                st.text(document_text[:500] + "..." if len(document_text) > 500 else document_text)
# --- Main chat area ---------------------------------------------------------
with col_main:
    # Ensure there is always an active chat to render.
    if st.session_state.current_chat_id is None:
        create_new_chat()

    if st.session_state.selected_model is None:
        # Model picker: the choice is stored both globally and per chat.
        st.markdown("### 🤖 Vælg AI Model")
        col1, col2, col3 = st.columns(3)
        with col1:
            if st.button("🌐 ChatGPT", use_container_width=True):
                st.session_state.selected_model = "gpt"
                st.session_state.chat_models[st.session_state.current_chat_id] = "gpt"
                st.rerun()
        with col2:
            if st.button("🔅 Claude", use_container_width=True):
                st.session_state.selected_model = "claude"
                st.session_state.chat_models[st.session_state.current_chat_id] = "claude"
                st.rerun()
        with col3:
            if st.button("🎗️ Mistral", use_container_width=True):
                st.session_state.selected_model = "mistral"
                st.session_state.chat_models[st.session_state.current_chat_id] = "mistral"
                st.rerun()
    else:
        # Chat interface header with the active model's icon and name.
        current_model = st.session_state.chat_models[st.session_state.current_chat_id]
        st.markdown(f"### {get_model_icon(current_model)} Chat med {current_model.upper()}")

        # Replay the stored conversation.
        chat_container = st.container()
        with chat_container:
            for msg in st.session_state.chat_histories[st.session_state.current_chat_id]:
                with st.chat_message(msg["role"]):
                    st.write(msg["content"])

        if prompt := st.chat_input("🖊️ Skriv din besked her..."):
            # Prepend the uploaded document (if any) as context for the model;
            # only the raw prompt is stored in the visible history.
            if st.session_state.document_text:
                full_prompt = f"""Kontekst dokument:
{st.session_state.document_text}
Spørgsmål: {prompt}"""
            else:
                full_prompt = prompt

            if st.session_state.selected_model in ["gpt", "claude", "mistral"]:
                # Fix: fetch the reply BEFORE appending the user turn to the
                # history. The chat_with_* helpers already append the prompt to
                # the messages they send, so appending to the history first
                # made every user turn go to the API twice (Anthropic rejects
                # consecutive same-role messages outright).
                if st.session_state.selected_model == "gpt":
                    response = chat_with_gpt(st.session_state.current_chat_id, full_prompt)
                elif st.session_state.selected_model == "claude":
                    response = chat_with_claude(st.session_state.current_chat_id, full_prompt)
                else:  # Mistral
                    response = chat_with_mistral(st.session_state.current_chat_id, full_prompt)

                history = st.session_state.chat_histories[st.session_state.current_chat_id]
                history.append({
                    "role": "user",
                    "content": prompt
                })
                history.append({
                    "role": "assistant",
                    "content": response
                })
                st.rerun()