from fastapi import FastAPI, HTTPException
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import os

app = FastAPI()

# Hugging Face authentication token
hf_token = os.getenv("HF_TOKEN")
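# HF_TOKEN only needs a value if the checkpoint is gated or private; for a fully
# public model it can be left unset and from_pretrained will fall back to anonymous access.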

# Load the quantized model and tokenizer once at startup so every request reuses the same weights.
# Note: `use_auth_token` is deprecated in recent transformers releases; `token` is the current keyword.
# The 4-bit bitsandbytes checkpoint needs the bitsandbytes package and a GPU;
# device_map="auto" lets accelerate place the weights automatically.
model_name = "unsloth/codellama-34b-bnb-4bit"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(model_name, token=hf_token, device_map="auto")
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

@app.get("/")
def greet_json():
    return {"Hello": "World!"}

@app.post("/generate")
def generate_text(prompt: str):
    try:
        generated_text = generator(prompt, max_length=50)
        return {"generated_text": generated_text[0]['generated_text']}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
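
# A minimal way to exercise the API, assuming this file is saved as app.py and uvicorn
# is installed (port 7860 is the default port expected by Hugging Face Spaces):
#   uvicorn app:app --host 0.0.0.0 --port 7860
#   curl -X POST "http://localhost:7860/generate?prompt=def%20fibonacci(n):"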