# physics_master / app.py
import streamlit as st
from llama_cpp import Llama
import os
import json
import time

# Convert a chat message history into a Llama-3 style prompt string.
# Note: not currently called below; kept as a reference for the chat format.
def prompt_from_messages(messages):
    prompt = ''
    for message in messages:
        prompt += f"<|start_header_id|>{message['role']}<|end_header_id|>\n\n"
        prompt += f"{message['content']}<|eot_id|>"
    # Strip the trailing '<|eot_id|>' (10 characters) so the model continues from the last turn
    prompt = prompt[:-10]
    return prompt
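# Illustrative sketch (not part of the original app): what prompt_from_messages
# returns for a small example history. The final '<|eot_id|>' is stripped.
#
#     prompt_from_messages([
#         {'role': 'system', 'content': 'You are helpful.'},
#         {'role': 'user', 'content': 'What is inertia?'},
#     ])
#     # -> '<|start_header_id|>system<|end_header_id|>\n\nYou are helpful.<|eot_id|>'
#     #    '<|start_header_id|>user<|end_header_id|>\n\nWhat is inertia?'
#     #    (returned as one concatenated string, shown here wrapped across two lines)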

# Initialize the Llama model (downloads the GGUF file from the Hugging Face Hub on first run)
llm = Llama.from_pretrained(
    repo_id='gallen881/Llama-3-8B-Physics_Master-GGUF',
    filename='unsloth.Q4_K_M.gguf',
    n_ctx=2048,
    verbose=False
)
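# Because Streamlit reruns this script on every interaction, the loader above runs
# again each time. One possible refinement (a sketch, not part of the original app;
# load_model is just an illustrative name) is to wrap it in Streamlit's resource cache:
#
#     @st.cache_resource
#     def load_model():
#         return Llama.from_pretrained(
#             repo_id='gallen881/Llama-3-8B-Physics_Master-GGUF',
#             filename='unsloth.Q4_K_M.gguf',
#             n_ctx=2048,
#             verbose=False
#         )
#
#     llm = load_model()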
# Set up Streamlit App Layout
st.title("Physics Master Chatbot")
st.markdown("Ask **Physics Master** any physics-related question.")

# Initialize chat history in session state
if 'messages' not in st.session_state:
    st.session_state.messages = [
        {
            'role': 'system',
            'content': 'You are a professional physics master. Answer physics questions directly without using any external resources.'
        }
    ]
    st.session_state.chat_time = time.time()

# Display chat history (the system prompt is kept internal and not shown)
for message in st.session_state.messages:
    if message['role'] == 'user':
        st.write(f"**You:** {message['content']}")
    elif message['role'] == 'assistant':
        st.write(f"**Physics Master:** {message['content']}")

# Use a form to manage user input and submission
with st.form(key="input_form", clear_on_submit=True):
    user_input = st.text_input("Ask a question", key="user_input")
    submit_button = st.form_submit_button(label="Send")

if submit_button and user_input:
    # Append the user message to the chat history
    user_message = {'role': 'user', 'content': user_input}
    st.session_state.messages.append(user_message)

    # Let the user know a response is being generated
    st.write('Physics Master is thinking...')

    # Accumulate the streamed response token by token
    full_response = ""
    response = llm.create_chat_completion(
        messages=st.session_state.messages,
        stream=True
    )
    for chunk in response:
        delta = chunk['choices'][0]['delta']
        if 'role' in delta:
            # The first streamed chunk carries the assistant role; add a placeholder message
            st.session_state.messages.append({'role': delta['role'], 'content': ''})
        elif 'content' in delta:
            # Later chunks carry content tokens; accumulate them
            full_response += delta['content']
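    # The loop above collects the stream silently and prints the answer only once it
    # is complete. To show tokens as they arrive instead, one option (a sketch, not
    # part of the original app) is a Streamlit placeholder updated inside the loop:
    #
    #     placeholder = st.empty()
    #     for chunk in response:
    #         delta = chunk['choices'][0]['delta']
    #         if 'content' in delta:
    #             full_response += delta['content']
    #             placeholder.markdown(f"**Physics Master:** {full_response}")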
    # Once the full response is received, store it in the assistant placeholder
    st.session_state.messages[-1]['content'] = full_response

    # Display the full response
    st.write(f"**Physics Master:** {full_response}")

    # Save the chat history to a JSON file
    with open('chat_history.json', 'w', encoding='utf8') as file:
        json.dump(st.session_state.messages, file, indent=4)
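# A matching loader (an illustrative sketch; load_history is a hypothetical helper,
# not part of the original app) could restore a saved session on startup:
#
#     def load_history(path='chat_history.json'):
#         if os.path.exists(path):
#             with open(path, encoding='utf8') as f:
#                 return json.load(f)
#         return None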