{ "hook_point_in": "blocks.3.hook_mlp_out", "hook_point_out": "blocks.3.hook_mlp_out", "use_decoder_bias": false, "apply_decoder_bias_to_pre_encoder": true, "decoder_bias_init_method": "geometric_median", "expansion_factor": 32, "d_model": 768, "d_sae": 24576, "norm_activation": "token-wise", "decoder_exactly_unit_norm": false, "use_glu_encoder": false, "l1_coefficient": 0.00012, "lp": 1, "use_ghost_grads": true }