# custom_model.py
from transformers import PreTrainedModel, PretrainedConfig, AutoConfig, AutoModel
import torch.nn as nn
import subprocess
class CustomModelConfig(PretrainedConfig):
    """Configuration for :class:`CustomModel`.

    All hyperparameters are exposed as keyword arguments (defaults preserve
    the previous hard-coded values). Previously every field except
    ``hidden_size`` was assigned a constant *after* ``super().__init__``,
    which silently clobbered any value supplied by a caller or restored
    from a serialized ``config.json`` — accepting them as parameters keeps
    save/load round-trips faithful.
    """

    # Key used by AutoConfig/AutoModel to map a config to this class.
    model_type = "custom-model"

    def __init__(
        self,
        hidden_size=128,
        num_attention_heads=1,
        vocab_size=1000,
        num_hidden_layers=1,
        intermediate_size=256,
        hidden_act="silu",
        rms_norm_eps=1e-8,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.vocab_size = vocab_size
        self.num_hidden_layers = num_hidden_layers
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.rms_norm_eps = rms_norm_eps
class CustomModel(PreTrainedModel):
    """Minimal model paired with :class:`CustomModelConfig`.

    A single square linear layer; intended as a lightweight stand-in for a
    real architecture (e.g. in tests of the Auto* registration machinery).
    """

    # Tells PreTrainedModel which config class this model expects.
    config_class = CustomModelConfig

    def __init__(self, config):
        super().__init__(config)
        # One hidden_size -> hidden_size projection; no bias options,
        # embeddings, or attention — deliberately minimal.
        self.linear = nn.Linear(config.hidden_size, config.hidden_size)

    def forward(self, input_ids):
        # NOTE(review): the input is fed directly to nn.Linear, so despite
        # the name it must be a float tensor of shape (..., hidden_size),
        # not integer token ids — presumably intentional for a toy model.
        return self.linear(input_ids)
# Register the custom classes with the Auto* factories so that
# AutoConfig/AutoModel.from_pretrained can resolve model_type
# "custom-model" to these implementations.
AutoConfig.register("custom-model", CustomModelConfig)
AutoModel.register(CustomModelConfig, CustomModel)