from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import gradio as gr

# Load the model, tokenizer, and text-generation pipeline
model_name = "Salesforce/codegen-350M-mono"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, pad_token_id=tokenizer.eos_token_id)

# Generate Python code from a natural-language prompt
def generate_code(prompt):
    output = generator(prompt, max_new_tokens=256, do_sample=True, temperature=0.3, top_p=0.95)
    return output[0]["generated_text"]
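
# Optional local sanity check (a minimal sketch; the example prompt below is
# illustrative only and not part of the original app):
# print(generate_code("# write a function to reverse a list\ndef reverse_list(items):"))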

# Gradio UI
ui = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=4, label="Enter your Python prompt"),
    outputs=gr.Code(label="Generated Python Code"),
    title="AI Python Code Generator",
    description="Type a task like 'write a function to reverse a list' and get Python code.",
    theme="default",
)

ui.launch()