import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Load the Llama-2 chat model and tokenizer, then wrap them in a text-generation pipeline
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf")
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf")
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=200)

# Create a banner using Markdown
st.markdown(
    """
    <div style="min-width: 1000px;">
        <div style="background-color:#f63366;padding:10px;border-radius:10px;">
            <h1 style="color:white;text-align:center;">Red Octopus</h1>
        </div>
        <div style="color:black;text-align:center;">
            <h1>Welcome to the Proposition Management Tool</h1>
        </div>
    </div>
    """,
    unsafe_allow_html=True
)

# Collect the user's selections and proposition text
selectedCity = st.selectbox("Please select the City for your Proposition.", ["CharlesTown", "Limburg"])
selectedProduct = st.selectbox("Please select the Bank Product", ["Current", "Mortgage", "Credit Card", "Crypto"])
userProposal = st.text_input("Enter your Proposition for the selected City and Product")
submit_button = st.button("Submit")

if submit_button:
    st.write("You clicked the Submit button!")
    st.write("Entered text:", userProposal)
    # Wrap the proposition in Llama-2's instruction format and generate a response
    result = pipe(f"<s>[INST] {userProposal} [/INST]")
    st.write(result)
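A note on running this sketch: assuming the script is saved as app.py (the filename is an assumption, not from the original), it can be launched with `streamlit run app.py`. The meta-llama/Llama-2-7b-chat-hf checkpoint is gated on the Hugging Face Hub, so you will need to accept the Llama 2 license and be authenticated (for example via `huggingface-cli login`) before the model and tokenizer can be downloaded.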