import os
import traceback

import spaces
import gradio as gr
from huggingface_hub import login
from transformers import AutoModelForSeq2SeqLM

@spaces.GPU
def fine_tune_model(model_name):
    # login(api_key.strip())  # optional: authenticate for gated/private models
    # Load the model (no tokenizer is needed for this smoke test)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name.strip())

    return "WORKS!"  # placeholder status; the loaded model itself is not returned
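
# A minimal, hedged sketch of what the eventual fine-tuning step could look like.
# It is not called anywhere yet; `run_fine_tuning`, `train_data` (assumed to be a
# pre-tokenized datasets.Dataset with "input_ids"/"labels" columns), and the
# hyperparameters below are illustrative assumptions, not part of the original app.
def run_fine_tuning(model, model_name, train_data, output_dir="./results"):
    from transformers import AutoTokenizer, Seq2SeqTrainer, Seq2SeqTrainingArguments

    tokenizer = AutoTokenizer.from_pretrained(model_name.strip())
    training_args = Seq2SeqTrainingArguments(
        output_dir=output_dir,
        num_train_epochs=1,
        per_device_train_batch_size=8,
        logging_steps=10,
    )
    trainer = Seq2SeqTrainer(
        model=model,
        args=training_args,
        train_dataset=train_data,
        tokenizer=tokenizer,
    )
    trainer.train()
    return trainer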

# Create Gradio interface
try:
    iface = gr.Interface(
        fn=fine_tune_model,
        inputs=[
            gr.Textbox(label="Model Name (e.g., 'google/t5-efficient-tiny-nh8')"),
        ],
        outputs="text",
        title="Fine-Tune Hugging Face Model",
        description="Loads the specified Hugging Face seq2seq model on a GPU Space as a first step toward fine-tuning (training on a dataset is not wired up yet)."
    )

    # Launch the interface
    iface.launch()
except Exception as e:
    print(f"An error occurred: {str(e)}, TB: {traceback.format_exc()}")