Spaces: Runtime error
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer once and cache them so Streamlit reruns do not reload the 7B weights
@st.cache_resource
def load_model():
    model_id = "google/gemma-7b"  # Replace with "google/gemma-7b-it" for the instruction-tuned variant
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    # Half precision reduces memory use; remove torch_dtype if the runtime lacks bf16 support
    model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
    return tokenizer, model

tokenizer, model = load_model()

# Generate a response from the running conversation
def generate_response(messages):
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    outputs = model.generate(input_ids, max_new_tokens=100)
    # Decode only the newly generated tokens, not the echoed prompt
    return tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True)

st.title("Gemma Chatbot")

# Keep the conversation in session state so it survives Streamlit reruns
if "messages" not in st.session_state:
    st.session_state.messages = []

user_input = st.text_input("You:", "")

if st.button("Send"):
    if user_input:
        st.session_state.messages.append({"role": "user", "content": user_input})
        bot_response = generate_response(st.session_state.messages)
        st.session_state.messages.append({"role": "assistant", "content": bot_response})
    else:
        st.warning("Please enter a message to process.")

# Display the conversation; each widget gets a unique key to avoid duplicate-widget errors
for i, message in enumerate(st.session_state.messages):
    if message["role"] == "user":
        st.text_input("You:", value=message["content"], disabled=True, key=f"user_{i}")
    elif message["role"] == "assistant":
        st.text_area("Gemma:", value=message["content"], disabled=True, key=f"bot_{i}")