from transformers import PretrainedConfig
class PretrainedWord2VecHFConfig(PretrainedConfig):
    """Hugging Face config for a pretrained word2vec/GloVe embedding model.

    Stores the embedding-table dimensions and mirrors ``vector_size`` as
    ``hidden_size`` so downstream code that reads ``hidden_size`` (e.g.
    sentence-transformers / sBERT pooling) works without modification.
    """

    # Identifier used by the HF machinery for config (de)serialization.
    model_type = "glove"

    def __init__(self, num_words: int = 400001, vector_size: int = 50, **kwargs):
        """
        Args:
            num_words: Vocabulary size, i.e. number of rows in the embedding
                table. Default 400001 — presumably the 400k GloVe vocabulary
                plus one extra (padding/unknown) token; TODO confirm.
            vector_size: Dimensionality of each embedding vector.
            **kwargs: Forwarded unchanged to ``PretrainedConfig.__init__``
                (standard HF custom-config pattern: set own attributes first,
                then delegate the remaining kwargs to the base class).
        """
        self.num_words = num_words
        self.vector_size = vector_size
        # sBERT reads `hidden_size`; keep it in lockstep with vector_size.
        self.hidden_size = self.vector_size
        super().__init__(**kwargs)