bghira committed
Commit f85bdc9 · verified · 1 parent: c7efbcb

has T5 text encoder layers which do not load and cause a fatal error


lora_te1_text_model_encoder_layers_0_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_0_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_0_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_0_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_0_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_0_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_0_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_0_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_0_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_0_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_10_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_10_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_10_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_10_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_10_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_10_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_10_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_10_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_10_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_10_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_11_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_11_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_11_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_11_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_11_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_11_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_11_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_11_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_11_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_11_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight, 
lora_te1_text_model_encoder_layers_1_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_1_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_1_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_1_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_1_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_1_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_1_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_1_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_1_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_1_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_2_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_2_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_2_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_2_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_2_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_2_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_2_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_2_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_2_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_2_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_3_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_3_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_3_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_3_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_3_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_3_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_3_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_3_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_3_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_3_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight, 
lora_te1_text_model_encoder_layers_4_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_4_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_4_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_4_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_4_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_4_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_4_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_4_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_4_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_4_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_5_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_5_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_5_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_5_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_5_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_5_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_5_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_5_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight, lora_te1_text_model_encoder_layers_5_self_attn_q_proj.alpha, lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lora_down.weight, lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight, lora_te1_text_model_encoder_layers_5_self_attn_v_proj.alpha, lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lora_down.weight, lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight, lora_te1_text_model_encoder_layers_6_mlp_fc1.alpha, lora_te1_text_model_encoder_layers_6_mlp_fc1.lora_down.weight, lora_te1_text_model_encoder_layers_6_mlp_fc1.lora_up.weight, lora_te1_text_model_encoder_layers_6_mlp_fc2.alpha, lora_te1_text_model_encoder_layers_6_mlp_fc2.lora_down.weight, lora_te1_text_model_encoder_layers_6_mlp_fc2.lora_up.weight, lora_te1_text_model_encoder_layers_6_self_attn_k_proj.alpha, lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lora_down.weight, lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight, lora_te1_text_model_encoder_layers_6_self_attn_out_proj.alpha, lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lora_down.weight, lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight, lora_te1_tex
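A file like this can be scanned for the offending keys before loading, so a loader can skip or strip them instead of failing. A minimal sketch, assuming the safetensors Python package and the filename added in this commit; nothing below is part of the commit itself:

# Scan a LoRA .safetensors file for the text-encoder keys listed above.
from safetensors import safe_open

path = "Anime v1.3.safetensors"  # the file added in this commit

with safe_open(path, framework="pt") as f:
    te_keys = [k for k in f.keys() if k.startswith("lora_te1_")]

print(f"{len(te_keys)} text-encoder LoRA keys found")  # nonzero -> would trip the loader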

Files changed (1)
  1. Anime v1.3.safetensors +3 -0
Anime v1.3.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d061b16de09997281ede379fe7b5910e99b36cfadd856184630327bdd815baf
+ size 211010756
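The three added lines are not the model weights themselves: they are a Git LFS pointer that the host resolves to the real 211,010,756-byte blob identified by the SHA-256 oid. A minimal sketch of reading such a pointer's fields, with the pointer text copied from the diff above (the parsing code is an illustration, not part of the commit):

# Parse a Git LFS pointer (spec: https://git-lfs.github.com/spec/v1).
pointer = """\
version https://git-lfs.github.com/spec/v1
oid sha256:0d061b16de09997281ede379fe7b5910e99b36cfadd856184630327bdd815baf
size 211010756
"""

fields = dict(line.split(" ", 1) for line in pointer.strip().splitlines())
algo, digest = fields["oid"].split(":", 1)
print(algo, digest, int(fields["size"]))  # sha256 <digest> 211010756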