"""
helpers for lora embeddings
"""


def get_linear_embedding_layers(model_type: str) -> list[str]:
    """
    returns the names of the linear embedding layers needed for LoRA, depending on the
    model architecture
    """
    if model_type == "phi-msft":
        return ["embd", "lm_head.linear"]
    return ["lm_head", "embed_tokens"]
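

# Illustrative usage (a sketch, not part of the original file): the returned names
# identify the input/output embedding modules for a given architecture string.
if __name__ == "__main__":
    # the phi-msft architecture exposes differently named embedding modules
    assert get_linear_embedding_layers("phi-msft") == ["embd", "lm_head.linear"]
    # other architectures fall back to the common lm_head / embed_tokens names
    assert get_linear_embedding_layers("llama") == ["lm_head", "embed_tokens"]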