from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr

# Model checkpoint: the 350M-parameter CodeGen model fine-tuned on Python-only
# data ("mono"), suitable for natural-language -> Python code generation.
model_name = "Salesforce/codegen-350M-mono"
# Load the tokenizer and causal-LM weights once at import time so every
# request reuses them (downloads from the Hugging Face hub on first run).
codegen_token = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def codegen(intent):
    """Generate Python code from a natural-language description.

    Example intent: "Write a function which takes 2 numbers as input and
    returns the larger of the two."

    Args:
        intent: Free-form text describing the program to generate.

    Returns:
        The decoded model output as a string (the prompt followed by the
        generated code; special tokens are stripped).
    """
    # Tokenize the prompt; max_length=256 bounds the TOTAL sequence length,
    # prompt tokens included, so very long intents leave less room for output.
    input_ids = codegen_token(intent, return_tensors="pt").input_ids
    outcode_ids = model.generate(input_ids, max_length=256)
    # Decode only the first (and only) sequence in the batch.
    return codegen_token.decode(outcode_ids[0], skip_special_tokens=True)

# UI: single-line prompt in, generated Python code out, served via Gradio.
prompt_box = gr.Textbox(lines=1, label="Place your intent here.")
code_box = gr.Textbox(lines=1, label="Generated python code", placeholder="")
demo = gr.Interface(fn=codegen, inputs=prompt_box, outputs=code_box)
demo.launch()