from torchtune.models import convert_weights

from models.tokenizer import a2a_tokenizer
from models.mmllama3 import lora_mmllama3_8b, mmllama3_8b, imagebind_huge

__all__ = [
    "a2a_tokenizer",
    "lora_mmllama3_8b",
    "mmllama3_8b",
    "imagebind_huge",
]

_BASE_TRAINABLE = [
    "tok_embeddings.proj_to_llama.0.weight",
    "tok_embeddings.proj_to_llama.0.bias",
    "tok_embeddings.proj_to_llama.2.weight",
    "tok_embeddings.proj_to_llama.2.bias",
    "tok_embeddings.proj_to_llama.3.weight",
    "tok_embeddings.proj_to_llama.3.bias",
    "output.proj_from_llama.0.weight",
    "output.proj_from_llama.0.bias",
    "output.proj_from_llama.2.weight",
    "output.proj_from_llama.2.bias",
    "output.proj_from_llama.3.weight",
    "output.proj_from_llama.3.bias",
]

def add_proj_convert_weights():
    # Extend torchtune's _FROM_META name mapping with identity entries for the new
    # projection parameters listed in _BASE_TRAINABLE, so the existing
    # checkpoint save/convert code handles them without any changes.
    convert_weights._FROM_META.update({a: a for a in _BASE_TRAINABLE})
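

# Example usage (a minimal sketch): register the identity mappings once, before any
# checkpoint conversion runs, so the projection parameters pass through unchanged.
# The call to convert_weights.tune_to_meta below assumes torchtune's standard helper
# of that name and a hypothetical `model`; adjust to your torchtune version and setup.
#
#     add_proj_convert_weights()
#     meta_state_dict = convert_weights.tune_to_meta(model.state_dict())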