File size: 845 Bytes
4f4f02f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
from typing import Any, Dict, List

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer


class EndpointHandler():
    """Hugging Face Inference Endpoints handler that scores text with a causal LM.

    Loads a causal language model and, per request, returns the model's
    language-modeling loss on the input text (mean token cross-entropy;
    lower = the model finds the text more probable).
    """

    def __init__(self, path: str = ""):
        """Load the model and tokenizer from *path* (local dir or model id).

        trust_remote_code=True allows custom modeling code shipped with the
        checkpoint to run — only safe for trusted repositories.
        """
        self.model = AutoModelForCausalLM.from_pretrained(path,
                                                          trust_remote_code=True)
        # Inference only: disable dropout etc. so scores are deterministic.
        self.model.eval()
        self.tokenizer = AutoTokenizer.from_pretrained(path)

    def __call__(self, data: Dict[str, Any]) -> float:
        """Score the text in ``data["inputs"]``.

        Args:
            data: request payload. The text to score is read (and popped)
                from the "inputs" key; if absent, the whole payload is
                handed to the tokenizer as-is.

        Returns:
            The LM loss of the text under the model, as a plain float.
            (Previous annotation of List[Dict[str, Any]] was wrong — the
            method has always returned a scalar.)
        """
        input_text = data.pop("inputs", data)
        inputs = self.tokenizer(input_text, return_tensors="pt")
        # Pure inference: skip autograd graph construction to save memory.
        with torch.no_grad():
            score = self.model(input_ids=inputs.input_ids,
                               attention_mask=inputs.attention_mask,
                               labels=inputs.input_ids).loss.item()
        return score