# CaGBERT / app.py — Streamlit demo space for the GBERT Ca token-classification model.
# (HF Spaces viewer residue preserved as comments: uploaded by MSey,
#  "Update app.py", commit c2e4cc3, verified)
import os
from huggingface_hub import login
from transformers import pipeline, AutoTokenizer
import streamlit as st
# Read the Hugging Face access token from the Space's secrets.
# os.getenv returns None when the secret is not configured — in that case
# skip login() entirely instead of calling it with token=None, which would
# fail (or fall back to interactive prompting) at app startup.
hf_token = os.getenv("HF_MODEL_TOKEN")
if hf_token:
    # Authenticate so gated/private model downloads below succeed.
    login(token=hf_token)
@st.cache_resource
def load_pipe():
    """Load the token-classification pipeline and its tokenizer once.

    Cached via st.cache_resource so the model is downloaded and
    instantiated a single time per Streamlit server process.

    Returns:
        tuple: (transformers token-classification Pipeline, AutoTokenizer)
        for the fine-tuned GBERT checkpoint below.
    """
    checkpoint = "MSey/_table_CaBERT_0003_gbert-base_fl32_checkpoint-15852"
    ner_pipeline = pipeline("token-classification", model=checkpoint)
    tokenizer = AutoTokenizer.from_pretrained(checkpoint)
    return ner_pipeline, tokenizer
# Instantiate (or fetch from cache) the model pipeline and tokenizer.
# NOTE(review): the tokenizer is unpacked but not used below — presumably
# kept for future pre/post-processing; confirm before removing.
pipe, tokenizer = load_pipe()

st.header("Test Environment for GBERT Ca Model")

user_input = st.text_input("Enter your Prompt here:", "")

if user_input:
    with st.spinner('Generating response...'):
        response = pipe(user_input)
        st.write("Response:")
        # Build one "word<TAB>label" line per predicted entity in a single
        # join instead of quadratic string += concatenation in a loop.
        tuples = "".join(
            f"{entity['word']}\t{entity['entity']}\n" for entity in response
        )
        # Display as plain preformatted text (tab-separated word/label pairs).
        st.text(tuples)