# Hugging Face Space (status at scrape time: Sleeping)
# StarCoder-based code-generation demo.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hub identifier of the checkpoint loaded by AIModel below.
MODEL_NAME = "bigcode/starcoder"
class AIModel:
    """Thin wrapper around the StarCoder causal LM for prompt-to-code generation."""

    def __init__(self):
        # BUG FIX: the original defined `_init_` (single underscores), which
        # Python never calls as a constructor — tokenizer/model were never
        # loaded and generate_code() raised AttributeError.
        self.tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
        # device_map="auto" lets accelerate shard/place the model; float16
        # halves the memory footprint versus the default float32 weights.
        self.model = AutoModelForCausalLM.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch.float16,
            device_map="auto",
        )

    def generate_code(self, prompt, max_length=300):
        """Generate a code continuation for *prompt*.

        Args:
            prompt: Text/code prefix to continue.
            max_length: Total token budget (prompt + generated tokens).

        Returns:
            The decoded generation, including the prompt, as a string.
        """
        # BUG FIX: move inputs to wherever device_map placed the model instead
        # of hard-coding "cuda", which crashed on CPU-only hosts.
        inputs = self.tokenizer(prompt, return_tensors="pt").to(self.model.device)
        outputs = self.model.generate(
            **inputs,
            max_length=max_length,
            num_return_sequences=1,
            do_sample=True,  # BUG FIX: temperature is ignored under greedy decoding
            temperature=0.7,
        )
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)