import fasttext
import gradio as gr
import torch
from huggingface_hub import hf_hub_download
from transformers import AutoModel

# Download the fastText embedding file from the Hugging Face Hub and load it.
repo_id = "tevykuch/zeroshot-reptile"
filename = "metalearn_wordy.bin"
model_path = hf_hub_download(repo_id=repo_id, filename=filename)
fasttext_model = fasttext.load_model(model_path)

# Load the classifier weights from the same repository.
model = AutoModel.from_pretrained(repo_id, force_download=True)
model.eval()

def predict(input_text):
    words = input_text.split()
    if not words:
        return "Please enter some text."
    # Embed each word with fastText and average into one sentence-level vector
    # of shape (1, embedding_dim).
    embeddings = torch.stack(
        [torch.from_numpy(fasttext_model.get_word_vector(word)) for word in words]
    )
    avg_embedding = embeddings.mean(dim=0).unsqueeze(0)
    # Assumes the model returns a raw logits tensor; if it returns a
    # transformers ModelOutput instead, use output.logits here.
    with torch.no_grad():
        output = model(avg_embedding)
    predicted_class = output.argmax(dim=1).item()
    return f"Predicted class: {predicted_class}"

iface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="My Model Demo",
    description="Enter some text to see the model prediction.",
)
iface.launch()
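
# A minimal sketch of alternative launch options (not part of the original app):
# when running locally rather than on a Hugging Face Space, a temporary public
# link can be created, or requests can be serialized through Gradio's queue:
#
#     iface.launch(share=True)
#     iface.queue().launch()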