{ "alpha_pattern": {}, "auto_mapping": null, "base_model_name_or_path": "google/gemma-3n-E2B-it", "bias": "none", "corda_config": null, "eva_config": null, "exclude_modules": null, "fan_in_fan_out": false, "inference_mode": true, "init_lora_weights": true, "layer_replication": null, "layers_pattern": null, "layers_to_transform": null, "loftq_config": {}, "lora_alpha": 32, "lora_bias": false, "lora_dropout": 0.05, "megatron_config": null, "megatron_core": "megatron.core", "modules_to_save": null, "peft_type": "LORA", "qalora_group_size": 16, "r": 16, "rank_pattern": {}, "revision": null, "target_modules": [ "per_layer_model_projection", "altup_projections.0", "correction_coefs", "altup_projections.2", "down_proj", "ffw_layer_2", "per_layer_projection", "input_proj_linear", "v_proj", "altup_unembed_projections.2", "up_proj", "o_proj", "q_proj", "embedding_projection", "k_proj", "post", "ffw_layer_1", "altup_projections.1", "altup_unembed_projections.0", "per_layer_input_gate", "gate_proj", "linear_end", "linear_left", "linear_start", "pos_proj", "linear_right", "modality_router", "altup_unembed_projections.1", "prediction_coefs" ], "task_type": "CAUSAL_LM", "trainable_token_indices": null, "use_dora": false, "use_qalora": false, "use_rslora": false }