import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Basic Streamlit widgets: a slider demo, a prompt box, a send button,
# and an empty placeholder that the generated text will fill in later.
x = st.slider('Select a value')
st.write(x, 'squared is', x * x)
text = st.text_input('Enter a prompt')
btn = st.button('Send')
result = st.empty()

# Cache the lightweight GPT-2 pipeline so Streamlit does not reload it on every rerun.
@st.cache_resource
def load_gpt2():
    return pipeline('text-generation', model='gpt2')

llm = load_gpt2()

# Cache the heavyweight model as well, so it is downloaded and loaded only once
# rather than on every button click. Note that Mixtral-8x22B is a very large
# model; without quantization or sharding across devices it will not fit in
# memory on a typical machine, so the cached GPT-2 pipeline above is the
# practical fallback.
@st.cache_resource
def load_mixtral():
    model_id = "mistral-community/Mixtral-8x22B-v0.1"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)
    return tokenizer, model

# Only generate once the button is pressed and a prompt has been entered.
if btn and text:
    # Lightweight alternative using the GPT-2 pipeline:
    # res = llm(text)
    # result.success(res[0]["generated_text"].strip())

    tokenizer, model = load_mixtral()

    # Tokenize the user's prompt (instead of a hard-coded string) and generate.
    inputs = tokenizer(text, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=60)

    result.success(tokenizer.decode(outputs[0], skip_special_tokens=True))
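
# To try the app locally (assuming this file is saved as app.py):
#   streamlit run app.py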