adamkarvonen committed on
Commit 9f3f69a · verified · 1 Parent(s): 68a20f8

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full set of changes.
Files changed (50):
  1. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_10_eval_results.json +64 -0
  2. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_11_eval_results.json +64 -0
  3. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_6_eval_results.json +64 -0
  4. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_7_eval_results.json +64 -0
  5. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_8_eval_results.json +64 -0
  6. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +64 -0
  7. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +64 -0
  8. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +64 -0
  9. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +64 -0
  10. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +64 -0
  11. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +64 -0
  12. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +64 -0
  13. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +64 -0
  14. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +64 -0
  15. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +64 -0
  16. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +64 -0
  17. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +64 -0
  18. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +64 -0
  19. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +64 -0
  20. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +64 -0
  21. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +64 -0
  22. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +64 -0
  23. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +64 -0
  24. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +64 -0
  25. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +64 -0
  26. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +64 -0
  27. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +64 -0
  28. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +64 -0
  29. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +64 -0
  30. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +64 -0
  31. autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +64 -0
  32. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_9_eval_results.json +670 -0
  33. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +670 -0
  34. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +670 -0
  35. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +670 -0
  36. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +670 -0
  37. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +670 -0
  38. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +670 -0
  39. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +670 -0
  40. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +670 -0
  41. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +670 -0
  42. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +670 -0
  43. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +670 -0
  44. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +670 -0
  45. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json +670 -0
  46. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json +670 -0
  47. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json +670 -0
  48. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json +670 -0
  49. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json +670 -0
  50. sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json +670 -0
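All of the added files share the same SAE Bench eval-results schema shown in the diffs below. As a minimal, illustrative sketch (not part of this commit), the autointerp scores can be aggregated per SAE architecture roughly as follows, assuming the repository has been downloaded locally and using only keys that appear in the files below:

import json
from collections import defaultdict
from pathlib import Path

# Directory name taken from the file list above.
results_dir = Path("autointerp/saebench_gemma-2-2b_width-2pow12_date-0108")

scores_by_architecture = defaultdict(list)
for path in sorted(results_dir.glob("*_eval_results.json")):
    data = json.loads(path.read_text())
    arch = data["sae_cfg_dict"]["architecture"]  # e.g. "batch_topk", "gated", "jumprelu", "p_anneal"
    score = data["eval_result_metrics"]["autointerp"]["autointerp_score"]
    scores_by_architecture[arch].append(score)

for arch, scores in sorted(scores_by_architecture.items()):
    mean = sum(scores) / len(scores)
    print(f"{arch}: mean autointerp score {mean:.3f} over {len(scores)} SAEs")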
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_10_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "b2aadf41-9229-4b80-958d-5e1d0beb96b4",
  "datetime_epoch_millis": 1737020679580,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7916486859321594,
      "autointerp_std_dev": 0.13035988807678223
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_10",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "batch_topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_11_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "df1d7a46-ab33-4ee6-ac19-ecfff9e5f805",
  "datetime_epoch_millis": 1737021826603,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7867162823677063,
      "autointerp_std_dev": 0.13692742586135864
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_11",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "batch_topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_6_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "f5e386fa-5539-4df9-aa75-f10a2f489a04",
  "datetime_epoch_millis": 1737021220492,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8506948947906494,
      "autointerp_std_dev": 0.12770865857601166
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_6",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "batch_topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_7_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "575e3135-d6cf-4049-a5e2-f18d96b22a5f",
  "datetime_epoch_millis": 1737019628520,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8336908221244812,
      "autointerp_std_dev": 0.12862683832645416
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_7",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "batch_topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_8_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "b43c69a0-f58d-482e-829d-71f3c0e62679",
  "datetime_epoch_millis": 1737020153571,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8108108043670654,
      "autointerp_std_dev": 0.13136722147464752
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_8",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "batch_topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "1e79acfb-2a14-4529-86c6-e3ce802226dd",
  "datetime_epoch_millis": 1737022931700,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7144285440444946,
      "autointerp_std_dev": 0.12215999513864517
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "fbe17314-f0d0-4383-b5f2-49be399c3d44",
  "datetime_epoch_millis": 1737022377077,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7314285635948181,
      "autointerp_std_dev": 0.12664315104484558
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "6f7eea01-4958-41ab-83ec-3ac156c56e91",
  "datetime_epoch_millis": 1737023586280,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7563570737838745,
      "autointerp_std_dev": 0.13377536833286285
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "f693514c-12f1-4869-8592-a61b6a2187b5",
  "datetime_epoch_millis": 1737024130626,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7946517467498779,
      "autointerp_std_dev": 0.12868745625019073
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "4cffce44-43ca-4cfc-b4da-12f05640877e",
  "datetime_epoch_millis": 1737024709784,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8074285984039307,
      "autointerp_std_dev": 0.12618225812911987
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAE_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "3aeba649-ffec-4b61-9514-57c89311dff3",
  "datetime_epoch_millis": 1737025295403,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8160302639007568,
      "autointerp_std_dev": 0.12544232606887817
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_GatedSAETrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "gated",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "506b14ae-6fd7-4ce9-9e0f-8f6aa482cde1",
  "datetime_epoch_millis": 1737025886769,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8462530374526978,
      "autointerp_std_dev": 0.12418957054615021
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "5c1b9d18-8495-4823-8c4f-f2905b592b29",
  "datetime_epoch_millis": 1737026493355,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8319495916366577,
      "autointerp_std_dev": 0.12784433364868164
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "464a53c9-4ffd-4fd1-bd74-77ea0246fdb8",
  "datetime_epoch_millis": 1737027083971,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.8032143712043762,
      "autointerp_std_dev": 0.13085922598838806
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "a2ed548b-abad-4fd5-9c7c-78e3614ec79c",
  "datetime_epoch_millis": 1737027710706,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7845702171325684,
      "autointerp_std_dev": 0.13295450806617737
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "4224b62c-a04b-4b0c-93b9-87b2a07070da",
  "datetime_epoch_millis": 1737028317500,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7668572068214417,
      "autointerp_std_dev": 0.13942508399486542
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "7a19dad9-4871-4db7-b363-f78cf24e663a",
  "datetime_epoch_millis": 1737028857354,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7268108129501343,
      "autointerp_std_dev": 0.13264690339565277
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "144730f9-85e3-45c4-bd42-7aed780c62aa",
  "datetime_epoch_millis": 1737029398253,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7399542331695557,
      "autointerp_std_dev": 0.12953147292137146
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "p_anneal",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "742d9f1a-b0fd-44ed-b149-4302ec0660ba",
  "datetime_epoch_millis": 1737029977306,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7536427974700928,
      "autointerp_std_dev": 0.13322678208351135
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "p_anneal",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "dd8bb910-216b-4411-bd09-c272dffc19b5",
  "datetime_epoch_millis": 1737030578375,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.7672856450080872,
      "autointerp_std_dev": 0.13456249237060547
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "p_anneal",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,64 @@
{
  "eval_type_id": "autointerp",
  "eval_config": {
    "model_name": "gemma-2-2b",
    "n_latents": 1000,
    "override_latents": null,
    "dead_latent_threshold": 15,
    "random_seed": 42,
    "dataset_name": "monology/pile-uncopyrighted",
    "llm_context_size": 128,
    "llm_batch_size": 32,
    "llm_dtype": "bfloat16",
    "buffer": 10,
    "no_overlap": true,
    "act_threshold_frac": 0.01,
    "total_tokens": 2000000,
    "scoring": true,
    "max_tokens_in_explanation": 30,
    "use_demos_in_explanation": true,
    "n_top_ex_for_generation": 10,
    "n_iw_sampled_ex_for_generation": 5,
    "n_top_ex_for_scoring": 2,
    "n_random_ex_for_scoring": 10,
    "n_iw_sampled_ex_for_scoring": 2
  },
  "eval_id": "b0d29009-e70d-4592-9aef-7d7676c71738",
  "datetime_epoch_millis": 1737031112488,
  "eval_result_metrics": {
    "autointerp": {
      "autointerp_score": 0.78857421875,
      "autointerp_std_dev": 0.13087445497512817
    }
  },
  "eval_result_details": [],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "gemma-2-2b",
    "d_in": 2304,
    "d_sae": 4096,
    "hook_layer": 12,
    "hook_name": "blocks.12.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "p_anneal",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "bfloat16",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  }
}
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "e6a61d9a-8b30-4d66-b702-978696c4003d",
27
+ "datetime_epoch_millis": 1737031648574,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.8028915524482727,
31
+ "autointerp_std_dev": 0.13618695735931396
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "p_anneal",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "bf7f21e0-69f7-4c8c-b326-62ea7c88af98",
27
+ "datetime_epoch_millis": 1737032200544,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.814457356929779,
31
+ "autointerp_std_dev": 0.1355155110359192
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "p_anneal",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "e17170b2-f1e0-4cde-ac56-dea8344a66a7",
27
+ "datetime_epoch_millis": 1737032748971,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.7646217942237854,
31
+ "autointerp_std_dev": 0.13070812821388245
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "31353c15-1431-4162-b084-c7d6d820819d",
27
+ "datetime_epoch_millis": 1737033292274,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.7797142863273621,
31
+ "autointerp_std_dev": 0.12909041345119476
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "95335dac-f35b-4be4-a2a7-cb568f189847",
27
+ "datetime_epoch_millis": 1737033830239,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.7874999642372131,
31
+ "autointerp_std_dev": 0.1359395980834961
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "175fe964-faa6-4885-ba9c-1a33da2c9d50",
27
+ "datetime_epoch_millis": 1737034379481,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.8052729368209839,
31
+ "autointerp_std_dev": 0.1297060251235962
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "36b51f9d-20d9-4b56-8c73-69319cf9cd1b",
27
+ "datetime_epoch_millis": 1737034925259,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.8175240159034729,
31
+ "autointerp_std_dev": 0.13424772024154663
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "27f9d751-b299-42aa-8bad-e7083d6ebe7b",
27
+ "datetime_epoch_millis": 1737035458193,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.8334287405014038,
31
+ "autointerp_std_dev": 0.1355169117450714
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "standard_april_update",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "76a37e5b-1455-4221-bcbc-9c8b313f75bb",
27
+ "datetime_epoch_millis": 1737035997925,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.8556383848190308,
31
+ "autointerp_std_dev": 0.12594527006149292
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "topk",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
autointerp/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "eval_type_id": "autointerp",
3
+ "eval_config": {
4
+ "model_name": "gemma-2-2b",
5
+ "n_latents": 1000,
6
+ "override_latents": null,
7
+ "dead_latent_threshold": 15,
8
+ "random_seed": 42,
9
+ "dataset_name": "monology/pile-uncopyrighted",
10
+ "llm_context_size": 128,
11
+ "llm_batch_size": 32,
12
+ "llm_dtype": "bfloat16",
13
+ "buffer": 10,
14
+ "no_overlap": true,
15
+ "act_threshold_frac": 0.01,
16
+ "total_tokens": 2000000,
17
+ "scoring": true,
18
+ "max_tokens_in_explanation": 30,
19
+ "use_demos_in_explanation": true,
20
+ "n_top_ex_for_generation": 10,
21
+ "n_iw_sampled_ex_for_generation": 5,
22
+ "n_top_ex_for_scoring": 2,
23
+ "n_random_ex_for_scoring": 10,
24
+ "n_iw_sampled_ex_for_scoring": 2
25
+ },
26
+ "eval_id": "f3167cca-ffb2-46fc-b828-655cfafdeeb7",
27
+ "datetime_epoch_millis": 1737038165868,
28
+ "eval_result_metrics": {
29
+ "autointerp": {
30
+ "autointerp_score": 0.7819247245788574,
31
+ "autointerp_std_dev": 0.13157162070274353
32
+ }
33
+ },
34
+ "eval_result_details": [],
35
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
36
+ "sae_lens_id": "custom_sae",
37
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
38
+ "sae_lens_version": "5.3.1",
39
+ "sae_cfg_dict": {
40
+ "model_name": "gemma-2-2b",
41
+ "d_in": 2304,
42
+ "d_sae": 4096,
43
+ "hook_layer": 12,
44
+ "hook_name": "blocks.12.hook_resid_post",
45
+ "context_size": null,
46
+ "hook_head_index": null,
47
+ "architecture": "topk",
48
+ "apply_b_dec_to_input": null,
49
+ "finetuning_scaling_factor": null,
50
+ "activation_fn_str": "",
51
+ "prepend_bos": true,
52
+ "normalize_activations": "none",
53
+ "dtype": "bfloat16",
54
+ "device": "",
55
+ "dataset_path": "",
56
+ "dataset_trust_remote_code": true,
57
+ "seqpos_slice": [
58
+ null
59
+ ],
60
+ "training_tokens": 499998720,
61
+ "sae_lens_training_version": null,
62
+ "neuronpedia_id": null
63
+ }
64
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopK_gemma-2-2b__0108_resid_post_layer_12_trainer_9_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "6d7b6093-a112-45a0-98f8-b067524aecfa",
30
+ "datetime_epoch_millis": 1737009983932,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9571625404059887,
44
+ "sae_top_1_test_accuracy": 0.7903124999999999,
45
+ "sae_top_2_test_accuracy": 0.8259187499999999,
46
+ "sae_top_5_test_accuracy": 0.8832249999999999,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9682000398635864,
65
+ "sae_top_1_test_accuracy": 0.8019999999999999,
66
+ "sae_top_2_test_accuracy": 0.8524,
67
+ "sae_top_5_test_accuracy": 0.8824,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9482000470161438,
84
+ "sae_top_1_test_accuracy": 0.7154,
85
+ "sae_top_2_test_accuracy": 0.7689999999999999,
86
+ "sae_top_5_test_accuracy": 0.843,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9276000618934631,
103
+ "sae_top_1_test_accuracy": 0.7956000000000001,
104
+ "sae_top_2_test_accuracy": 0.8051999999999999,
105
+ "sae_top_5_test_accuracy": 0.8634000000000001,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9154000401496887,
122
+ "sae_top_1_test_accuracy": 0.7303999999999999,
123
+ "sae_top_2_test_accuracy": 0.7652000000000001,
124
+ "sae_top_5_test_accuracy": 0.8252,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9745000302791595,
141
+ "sae_top_1_test_accuracy": 0.895,
142
+ "sae_top_2_test_accuracy": 0.895,
143
+ "sae_top_5_test_accuracy": 0.907,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9678000450134278,
160
+ "sae_top_1_test_accuracy": 0.6884,
161
+ "sae_top_2_test_accuracy": 0.7562000000000001,
162
+ "sae_top_5_test_accuracy": 0.8996000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.956000030040741,
179
+ "sae_top_1_test_accuracy": 0.7845,
180
+ "sae_top_2_test_accuracy": 0.83475,
181
+ "sae_top_5_test_accuracy": 0.8869999999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9996000289916992,
198
+ "sae_top_1_test_accuracy": 0.9112,
199
+ "sae_top_2_test_accuracy": 0.9296,
200
+ "sae_top_5_test_accuracy": 0.9582,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_BatchTopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_9",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "batch_topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9570000171661377,
240
+ "1": 0.9660000205039978,
241
+ "2": 0.9530000686645508,
242
+ "6": 0.9900000691413879,
243
+ "9": 0.9750000238418579
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.577,
275
+ "1": 0.62,
276
+ "2": 0.89,
277
+ "6": 0.976,
278
+ "9": 0.947
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.816,
282
+ "1": 0.631,
283
+ "2": 0.889,
284
+ "6": 0.974,
285
+ "9": 0.952
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.872,
289
+ "1": 0.724,
290
+ "2": 0.888,
291
+ "6": 0.975,
292
+ "9": 0.953
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9590000510215759,
298
+ "13": 0.9510000348091125,
299
+ "14": 0.9520000219345093,
300
+ "18": 0.921000063419342,
301
+ "19": 0.9580000638961792
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.73,
333
+ "13": 0.688,
334
+ "14": 0.636,
335
+ "18": 0.682,
336
+ "19": 0.841
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.735,
340
+ "13": 0.685,
341
+ "14": 0.876,
342
+ "18": 0.71,
343
+ "19": 0.839
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.856,
347
+ "13": 0.779,
348
+ "14": 0.883,
349
+ "18": 0.851,
350
+ "19": 0.846
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9530000686645508,
356
+ "21": 0.9350000619888306,
357
+ "22": 0.9110000729560852,
358
+ "25": 0.9600000381469727,
359
+ "26": 0.8790000677108765
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.856,
391
+ "21": 0.802,
392
+ "22": 0.798,
393
+ "25": 0.886,
394
+ "26": 0.636
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.882,
398
+ "21": 0.831,
399
+ "22": 0.811,
400
+ "25": 0.862,
401
+ "26": 0.64
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.921,
405
+ "21": 0.865,
406
+ "22": 0.86,
407
+ "25": 0.898,
408
+ "26": 0.773
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9490000605583191,
414
+ "2": 0.9340000152587891,
415
+ "3": 0.9140000343322754,
416
+ "5": 0.9190000295639038,
417
+ "6": 0.8610000610351562
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.834,
449
+ "2": 0.628,
450
+ "3": 0.649,
451
+ "5": 0.818,
452
+ "6": 0.723
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.9,
456
+ "2": 0.737,
457
+ "3": 0.656,
458
+ "5": 0.809,
459
+ "6": 0.724
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.9,
463
+ "2": 0.873,
464
+ "3": 0.787,
465
+ "5": 0.805,
466
+ "6": 0.761
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9740000367164612,
472
+ "5.0": 0.9750000238418579
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.895,
492
+ "5.0": 0.895
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.895,
496
+ "5.0": 0.895
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.907,
500
+ "5.0": 0.907
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9630000591278076,
506
+ "Python": 0.984000027179718,
507
+ "HTML": 0.9820000529289246,
508
+ "Java": 0.9580000638961792,
509
+ "PHP": 0.9520000219345093
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.626,
541
+ "Python": 0.639,
542
+ "HTML": 0.909,
543
+ "Java": 0.646,
544
+ "PHP": 0.622
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.666,
548
+ "Python": 0.67,
549
+ "HTML": 0.915,
550
+ "Java": 0.648,
551
+ "PHP": 0.882
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.859,
555
+ "Python": 0.936,
556
+ "HTML": 0.921,
557
+ "Java": 0.882,
558
+ "PHP": 0.9
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9430000185966492,
564
+ "1": 0.9860000610351562,
565
+ "2": 0.9380000233650208,
566
+ "3": 0.9570000171661377
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.842,
594
+ "1": 0.94,
595
+ "2": 0.723,
596
+ "3": 0.633
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.842,
600
+ "1": 0.952,
601
+ "2": 0.836,
602
+ "3": 0.709
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.879,
606
+ "1": 0.953,
607
+ "2": 0.84,
608
+ "3": 0.876
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.999000072479248,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 1.0,
617
+ "nl": 0.999000072479248
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.992,
650
+ "de": 0.883,
651
+ "es": 0.917,
652
+ "nl": 0.764
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.996,
657
+ "de": 0.883,
658
+ "es": 0.934,
659
+ "nl": 0.837
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.997,
663
+ "fr": 0.995,
664
+ "de": 0.944,
665
+ "es": 0.998,
666
+ "nl": 0.857
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_JumpRelu_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "e3dccb4e-69d5-45c7-a7f9-981c68f6d519",
30
+ "datetime_epoch_millis": 1737014226818,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9585687953978778,
44
+ "sae_top_1_test_accuracy": 0.7935062499999999,
45
+ "sae_top_2_test_accuracy": 0.824975,
46
+ "sae_top_5_test_accuracy": 0.86745,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9678000330924987,
65
+ "sae_top_1_test_accuracy": 0.8062000000000001,
66
+ "sae_top_2_test_accuracy": 0.8452,
67
+ "sae_top_5_test_accuracy": 0.8702,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9534000515937805,
84
+ "sae_top_1_test_accuracy": 0.7816,
85
+ "sae_top_2_test_accuracy": 0.7827999999999999,
86
+ "sae_top_5_test_accuracy": 0.8274000000000001,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9302000522613525,
103
+ "sae_top_1_test_accuracy": 0.7525999999999999,
104
+ "sae_top_2_test_accuracy": 0.767,
105
+ "sae_top_5_test_accuracy": 0.8206,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9190000414848327,
122
+ "sae_top_1_test_accuracy": 0.7806,
123
+ "sae_top_2_test_accuracy": 0.8200000000000001,
124
+ "sae_top_5_test_accuracy": 0.8423999999999999,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9745000302791595,
141
+ "sae_top_1_test_accuracy": 0.862,
142
+ "sae_top_2_test_accuracy": 0.931,
143
+ "sae_top_5_test_accuracy": 0.962,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9720000624656677,
160
+ "sae_top_1_test_accuracy": 0.6416,
161
+ "sae_top_2_test_accuracy": 0.6962,
162
+ "sae_top_5_test_accuracy": 0.8076000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9522500485181808,
179
+ "sae_top_1_test_accuracy": 0.83725,
180
+ "sae_top_2_test_accuracy": 0.8539999999999999,
181
+ "sae_top_5_test_accuracy": 0.9019999999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9994000434875489,
198
+ "sae_top_1_test_accuracy": 0.8862,
199
+ "sae_top_2_test_accuracy": 0.9036000000000002,
200
+ "sae_top_5_test_accuracy": 0.9074,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_JumpReluTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "jumprelu",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9520000219345093,
240
+ "1": 0.9610000252723694,
241
+ "2": 0.9500000476837158,
242
+ "6": 0.9920000433921814,
243
+ "9": 0.984000027179718
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.796,
275
+ "1": 0.606,
276
+ "2": 0.866,
277
+ "6": 0.823,
278
+ "9": 0.94
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.809,
282
+ "1": 0.632,
283
+ "2": 0.87,
284
+ "6": 0.973,
285
+ "9": 0.942
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.808,
289
+ "1": 0.743,
290
+ "2": 0.874,
291
+ "6": 0.979,
292
+ "9": 0.947
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9630000591278076,
298
+ "13": 0.9520000219345093,
299
+ "14": 0.9500000476837158,
300
+ "18": 0.9390000700950623,
301
+ "19": 0.9630000591278076
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.839,
333
+ "13": 0.687,
334
+ "14": 0.838,
335
+ "18": 0.701,
336
+ "19": 0.843
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.851,
340
+ "13": 0.671,
341
+ "14": 0.839,
342
+ "18": 0.706,
343
+ "19": 0.847
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.843,
347
+ "13": 0.8,
348
+ "14": 0.822,
349
+ "18": 0.817,
350
+ "19": 0.855
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.956000030040741,
356
+ "21": 0.9220000505447388,
357
+ "22": 0.9120000600814819,
358
+ "25": 0.968000054359436,
359
+ "26": 0.893000066280365
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.886,
391
+ "21": 0.698,
392
+ "22": 0.679,
393
+ "25": 0.881,
394
+ "26": 0.619
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.873,
398
+ "21": 0.754,
399
+ "22": 0.675,
400
+ "25": 0.878,
401
+ "26": 0.655
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.916,
405
+ "21": 0.852,
406
+ "22": 0.674,
407
+ "25": 0.895,
408
+ "26": 0.766
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9470000267028809,
414
+ "2": 0.937000036239624,
415
+ "3": 0.9120000600814819,
416
+ "5": 0.9200000166893005,
417
+ "6": 0.8790000677108765
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.845,
449
+ "2": 0.831,
450
+ "3": 0.728,
451
+ "5": 0.768,
452
+ "6": 0.731
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.902,
456
+ "2": 0.855,
457
+ "3": 0.73,
458
+ "5": 0.833,
459
+ "6": 0.78
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.91,
463
+ "2": 0.86,
464
+ "3": 0.803,
465
+ "5": 0.861,
466
+ "6": 0.778
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9740000367164612,
472
+ "5.0": 0.9750000238418579
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.862,
492
+ "5.0": 0.862
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.931,
496
+ "5.0": 0.931
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.962,
500
+ "5.0": 0.962
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9630000591278076,
506
+ "Python": 0.9850000739097595,
507
+ "HTML": 0.9850000739097595,
508
+ "Java": 0.968000054359436,
509
+ "PHP": 0.9590000510215759
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.636,
541
+ "Python": 0.623,
542
+ "HTML": 0.713,
543
+ "Java": 0.638,
544
+ "PHP": 0.598
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.661,
548
+ "Python": 0.665,
549
+ "HTML": 0.885,
550
+ "Java": 0.638,
551
+ "PHP": 0.632
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.881,
555
+ "Python": 0.729,
556
+ "HTML": 0.931,
557
+ "Java": 0.664,
558
+ "PHP": 0.833
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.937000036239624,
564
+ "1": 0.9880000352859497,
565
+ "2": 0.9310000538825989,
566
+ "3": 0.9530000686645508
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.859,
594
+ "1": 0.974,
595
+ "2": 0.812,
596
+ "3": 0.704
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.872,
600
+ "1": 0.972,
601
+ "2": 0.856,
602
+ "3": 0.716
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.901,
606
+ "1": 0.976,
607
+ "2": 0.883,
608
+ "3": 0.848
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.999000072479248,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.999000072479248
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.829,
650
+ "de": 0.876,
651
+ "es": 0.875,
652
+ "nl": 0.852
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.852,
657
+ "de": 0.877,
658
+ "es": 0.934,
659
+ "nl": 0.857
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.842,
664
+ "de": 0.913,
665
+ "es": 0.936,
666
+ "nl": 0.847
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "6444c0b8-fffc-4bac-9a4d-975d3094d748",
30
+ "datetime_epoch_millis": 1737014467720,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9594187933951617,
44
+ "sae_top_1_test_accuracy": 0.7878000000000001,
45
+ "sae_top_2_test_accuracy": 0.83781875,
46
+ "sae_top_5_test_accuracy": 0.8794687499999999,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.966800057888031,
65
+ "sae_top_1_test_accuracy": 0.7698,
66
+ "sae_top_2_test_accuracy": 0.8442000000000001,
67
+ "sae_top_5_test_accuracy": 0.8894,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9504000425338746,
84
+ "sae_top_1_test_accuracy": 0.748,
85
+ "sae_top_2_test_accuracy": 0.789,
86
+ "sae_top_5_test_accuracy": 0.8560000000000001,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9360000491142273,
103
+ "sae_top_1_test_accuracy": 0.7524,
104
+ "sae_top_2_test_accuracy": 0.7812,
105
+ "sae_top_5_test_accuracy": 0.8572,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9158000469207763,
122
+ "sae_top_1_test_accuracy": 0.7363999999999999,
123
+ "sae_top_2_test_accuracy": 0.8051999999999999,
124
+ "sae_top_5_test_accuracy": 0.8412,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9805000424385071,
141
+ "sae_top_1_test_accuracy": 0.93,
142
+ "sae_top_2_test_accuracy": 0.94,
143
+ "sae_top_5_test_accuracy": 0.949,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9708000540733337,
160
+ "sae_top_1_test_accuracy": 0.6416,
161
+ "sae_top_2_test_accuracy": 0.7714000000000001,
162
+ "sae_top_5_test_accuracy": 0.8226000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9552500396966934,
179
+ "sae_top_1_test_accuracy": 0.865,
180
+ "sae_top_2_test_accuracy": 0.88175,
181
+ "sae_top_5_test_accuracy": 0.9037499999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9998000144958497,
198
+ "sae_top_1_test_accuracy": 0.8592000000000001,
199
+ "sae_top_2_test_accuracy": 0.8897999999999999,
200
+ "sae_top_5_test_accuracy": 0.9165999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9490000605583191,
240
+ "1": 0.9630000591278076,
241
+ "2": 0.9540000557899475,
242
+ "6": 0.9950000643730164,
243
+ "9": 0.9730000495910645
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.584,
275
+ "1": 0.639,
276
+ "2": 0.863,
277
+ "6": 0.829,
278
+ "9": 0.934
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.766,
282
+ "1": 0.673,
283
+ "2": 0.863,
284
+ "6": 0.978,
285
+ "9": 0.941
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.842,
289
+ "1": 0.805,
290
+ "2": 0.873,
291
+ "6": 0.984,
292
+ "9": 0.943
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9590000510215759,
298
+ "13": 0.9460000395774841,
299
+ "14": 0.956000030040741,
300
+ "18": 0.9360000491142273,
301
+ "19": 0.9550000429153442
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.858,
333
+ "13": 0.683,
334
+ "14": 0.646,
335
+ "18": 0.703,
336
+ "19": 0.85
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.873,
340
+ "13": 0.67,
341
+ "14": 0.827,
342
+ "18": 0.726,
343
+ "19": 0.849
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.874,
347
+ "13": 0.885,
348
+ "14": 0.84,
349
+ "18": 0.821,
350
+ "19": 0.86
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.956000030040741,
356
+ "21": 0.9270000457763672,
357
+ "22": 0.9250000715255737,
358
+ "25": 0.971000075340271,
359
+ "26": 0.9010000228881836
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.865,
391
+ "21": 0.808,
392
+ "22": 0.576,
393
+ "25": 0.887,
394
+ "26": 0.626
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.915,
398
+ "21": 0.817,
399
+ "22": 0.623,
400
+ "25": 0.869,
401
+ "26": 0.682
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.927,
405
+ "21": 0.844,
406
+ "22": 0.832,
407
+ "25": 0.895,
408
+ "26": 0.788
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9500000476837158,
414
+ "2": 0.9410000443458557,
415
+ "3": 0.9180000424385071,
416
+ "5": 0.9170000553131104,
417
+ "6": 0.8530000448226929
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.86,
449
+ "2": 0.643,
450
+ "3": 0.666,
451
+ "5": 0.772,
452
+ "6": 0.741
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.872,
456
+ "2": 0.857,
457
+ "3": 0.711,
458
+ "5": 0.79,
459
+ "6": 0.796
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.921,
463
+ "2": 0.873,
464
+ "3": 0.804,
465
+ "5": 0.839,
466
+ "6": 0.769
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9810000658035278,
472
+ "5.0": 0.9800000190734863
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.93,
492
+ "5.0": 0.93
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.94,
496
+ "5.0": 0.94
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.949,
500
+ "5.0": 0.949
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9540000557899475,
506
+ "Python": 0.984000027179718,
507
+ "HTML": 0.9850000739097595,
508
+ "Java": 0.9720000624656677,
509
+ "PHP": 0.9590000510215759
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.643,
541
+ "Python": 0.636,
542
+ "HTML": 0.703,
543
+ "Java": 0.628,
544
+ "PHP": 0.598
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.855,
548
+ "Python": 0.656,
549
+ "HTML": 0.914,
550
+ "Java": 0.634,
551
+ "PHP": 0.798
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.887,
555
+ "Python": 0.763,
556
+ "HTML": 0.947,
557
+ "Java": 0.656,
558
+ "PHP": 0.86
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.940000057220459,
564
+ "1": 0.987000048160553,
565
+ "2": 0.9380000233650208,
566
+ "3": 0.956000030040741
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.823,
594
+ "1": 0.974,
595
+ "2": 0.822,
596
+ "3": 0.841
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.829,
600
+ "1": 0.978,
601
+ "2": 0.858,
602
+ "3": 0.862
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.877,
606
+ "1": 0.975,
607
+ "2": 0.872,
608
+ "3": 0.891
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 0.999000072479248,
617
+ "nl": 1.0
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.807,
650
+ "de": 0.874,
651
+ "es": 0.874,
652
+ "nl": 0.742
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.825,
657
+ "de": 0.875,
658
+ "es": 0.915,
659
+ "nl": 0.834
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 1.0,
663
+ "fr": 0.864,
664
+ "de": 0.954,
665
+ "es": 0.916,
666
+ "nl": 0.849
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "53a80f05-0608-4da3-af2f-c26e35c81dce",
30
+ "datetime_epoch_millis": 1737014702718,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9575562935322522,
44
+ "sae_top_1_test_accuracy": 0.78686875,
45
+ "sae_top_2_test_accuracy": 0.82335,
46
+ "sae_top_5_test_accuracy": 0.8733875000000001,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9664000511169434,
65
+ "sae_top_1_test_accuracy": 0.7699999999999999,
66
+ "sae_top_2_test_accuracy": 0.818,
67
+ "sae_top_5_test_accuracy": 0.9019999999999999,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9518000364303589,
84
+ "sae_top_1_test_accuracy": 0.7194,
85
+ "sae_top_2_test_accuracy": 0.7554,
86
+ "sae_top_5_test_accuracy": 0.8417999999999999,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9272000432014466,
103
+ "sae_top_1_test_accuracy": 0.7864,
104
+ "sae_top_2_test_accuracy": 0.8013999999999999,
105
+ "sae_top_5_test_accuracy": 0.8318000000000001,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9172000408172607,
122
+ "sae_top_1_test_accuracy": 0.7478,
123
+ "sae_top_2_test_accuracy": 0.7782,
124
+ "sae_top_5_test_accuracy": 0.8468,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9775000512599945,
141
+ "sae_top_1_test_accuracy": 0.858,
142
+ "sae_top_2_test_accuracy": 0.873,
143
+ "sae_top_5_test_accuracy": 0.929,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9690000414848328,
160
+ "sae_top_1_test_accuracy": 0.6866,
161
+ "sae_top_2_test_accuracy": 0.7714,
162
+ "sae_top_5_test_accuracy": 0.799,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9517500549554825,
179
+ "sae_top_1_test_accuracy": 0.82875,
180
+ "sae_top_2_test_accuracy": 0.8679999999999999,
181
+ "sae_top_5_test_accuracy": 0.8875,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9996000289916992,
198
+ "sae_top_1_test_accuracy": 0.898,
199
+ "sae_top_2_test_accuracy": 0.9214,
200
+ "sae_top_5_test_accuracy": 0.9491999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9450000524520874,
240
+ "1": 0.9630000591278076,
241
+ "2": 0.9540000557899475,
242
+ "6": 0.9900000691413879,
243
+ "9": 0.9800000190734863
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.589,
275
+ "1": 0.631,
276
+ "2": 0.879,
277
+ "6": 0.813,
278
+ "9": 0.938
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.614,
282
+ "1": 0.686,
283
+ "2": 0.88,
284
+ "6": 0.979,
285
+ "9": 0.931
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.857,
289
+ "1": 0.835,
290
+ "2": 0.888,
291
+ "6": 0.979,
292
+ "9": 0.951
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9640000462532043,
298
+ "13": 0.9410000443458557,
299
+ "14": 0.9570000171661377,
300
+ "18": 0.9330000281333923,
301
+ "19": 0.9640000462532043
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.73,
333
+ "13": 0.685,
334
+ "14": 0.636,
335
+ "18": 0.7,
336
+ "19": 0.846
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.724,
340
+ "13": 0.713,
341
+ "14": 0.751,
342
+ "18": 0.734,
343
+ "19": 0.855
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.875,
347
+ "13": 0.782,
348
+ "14": 0.886,
349
+ "18": 0.816,
350
+ "19": 0.85
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9530000686645508,
356
+ "21": 0.9280000329017639,
357
+ "22": 0.9080000519752502,
358
+ "25": 0.9600000381469727,
359
+ "26": 0.8870000243186951
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.874,
391
+ "21": 0.817,
392
+ "22": 0.724,
393
+ "25": 0.883,
394
+ "26": 0.634
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.913,
398
+ "21": 0.814,
399
+ "22": 0.737,
400
+ "25": 0.865,
401
+ "26": 0.678
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.913,
405
+ "21": 0.856,
406
+ "22": 0.739,
407
+ "25": 0.889,
408
+ "26": 0.762
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9410000443458557,
414
+ "2": 0.9360000491142273,
415
+ "3": 0.9190000295639038,
416
+ "5": 0.9330000281333923,
417
+ "6": 0.8570000529289246
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.822,
449
+ "2": 0.727,
450
+ "3": 0.663,
451
+ "5": 0.778,
452
+ "6": 0.749
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.886,
456
+ "2": 0.748,
457
+ "3": 0.695,
458
+ "5": 0.794,
459
+ "6": 0.768
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.93,
463
+ "2": 0.871,
464
+ "3": 0.84,
465
+ "5": 0.85,
466
+ "6": 0.743
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9770000576972961,
472
+ "5.0": 0.9780000448226929
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.858,
492
+ "5.0": 0.858
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.873,
496
+ "5.0": 0.873
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.929,
500
+ "5.0": 0.929
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9530000686645508,
506
+ "Python": 0.9920000433921814,
507
+ "HTML": 0.9860000610351562,
508
+ "Java": 0.9570000171661377,
509
+ "PHP": 0.9570000171661377
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.663,
541
+ "Python": 0.619,
542
+ "HTML": 0.909,
543
+ "Java": 0.62,
544
+ "PHP": 0.622
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.865,
548
+ "Python": 0.653,
549
+ "HTML": 0.91,
550
+ "Java": 0.646,
551
+ "PHP": 0.783
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.852,
555
+ "Python": 0.68,
556
+ "HTML": 0.948,
557
+ "Java": 0.658,
558
+ "PHP": 0.857
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.940000057220459,
564
+ "1": 0.9850000739097595,
565
+ "2": 0.937000036239624,
566
+ "3": 0.9450000524520874
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.852,
594
+ "1": 0.97,
595
+ "2": 0.84,
596
+ "3": 0.653
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.86,
600
+ "1": 0.969,
601
+ "2": 0.843,
602
+ "3": 0.8
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.859,
606
+ "1": 0.977,
607
+ "2": 0.868,
608
+ "3": 0.846
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 0.999000072479248,
616
+ "es": 0.999000072479248,
617
+ "nl": 1.0
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.975,
650
+ "de": 0.875,
651
+ "es": 0.893,
652
+ "nl": 0.748
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.983,
657
+ "de": 0.889,
658
+ "es": 0.914,
659
+ "nl": 0.823
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.997,
663
+ "fr": 0.988,
664
+ "de": 0.92,
665
+ "es": 0.997,
666
+ "nl": 0.844
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "9ee9fe46-2530-41f5-913f-a71344ccfd7c",
30
+ "datetime_epoch_millis": 1737014949400,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9582875426858665,
44
+ "sae_top_1_test_accuracy": 0.7707250000000001,
45
+ "sae_top_2_test_accuracy": 0.82701875,
46
+ "sae_top_5_test_accuracy": 0.89103125,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9666000366210937,
65
+ "sae_top_1_test_accuracy": 0.7832,
66
+ "sae_top_2_test_accuracy": 0.8598000000000001,
67
+ "sae_top_5_test_accuracy": 0.9152000000000001,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9574000477790833,
84
+ "sae_top_1_test_accuracy": 0.7138,
85
+ "sae_top_2_test_accuracy": 0.7702,
86
+ "sae_top_5_test_accuracy": 0.8394,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9314000368118286,
103
+ "sae_top_1_test_accuracy": 0.7602,
104
+ "sae_top_2_test_accuracy": 0.8300000000000001,
105
+ "sae_top_5_test_accuracy": 0.8526,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9142000436782837,
122
+ "sae_top_1_test_accuracy": 0.712,
123
+ "sae_top_2_test_accuracy": 0.7522,
124
+ "sae_top_5_test_accuracy": 0.8394000000000001,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9755000472068787,
141
+ "sae_top_1_test_accuracy": 0.749,
142
+ "sae_top_2_test_accuracy": 0.807,
143
+ "sae_top_5_test_accuracy": 0.933,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9678000450134278,
160
+ "sae_top_1_test_accuracy": 0.6826,
161
+ "sae_top_2_test_accuracy": 0.7959999999999999,
162
+ "sae_top_5_test_accuracy": 0.9108,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9540000408887863,
179
+ "sae_top_1_test_accuracy": 0.857,
180
+ "sae_top_2_test_accuracy": 0.8767499999999999,
181
+ "sae_top_5_test_accuracy": 0.8912500000000001,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9994000434875489,
198
+ "sae_top_1_test_accuracy": 0.908,
199
+ "sae_top_2_test_accuracy": 0.9241999999999999,
200
+ "sae_top_5_test_accuracy": 0.9465999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9470000267028809,
240
+ "1": 0.9610000252723694,
241
+ "2": 0.9550000429153442,
242
+ "6": 0.9930000305175781,
243
+ "9": 0.9770000576972961
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.577,
275
+ "1": 0.633,
276
+ "2": 0.874,
277
+ "6": 0.977,
278
+ "9": 0.855
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.821,
282
+ "1": 0.666,
283
+ "2": 0.906,
284
+ "6": 0.977,
285
+ "9": 0.929
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.849,
289
+ "1": 0.904,
290
+ "2": 0.906,
291
+ "6": 0.982,
292
+ "9": 0.935
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9670000672340393,
298
+ "13": 0.9610000252723694,
299
+ "14": 0.9580000638961792,
300
+ "18": 0.9350000619888306,
301
+ "19": 0.9660000205039978
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.727,
333
+ "13": 0.676,
334
+ "14": 0.636,
335
+ "18": 0.686,
336
+ "19": 0.844
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.731,
340
+ "13": 0.697,
341
+ "14": 0.841,
342
+ "18": 0.735,
343
+ "19": 0.847
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.868,
347
+ "13": 0.788,
348
+ "14": 0.859,
349
+ "18": 0.823,
350
+ "19": 0.859
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9520000219345093,
356
+ "21": 0.9290000200271606,
357
+ "22": 0.9170000553131104,
358
+ "25": 0.9590000510215759,
359
+ "26": 0.9000000357627869
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.906,
391
+ "21": 0.805,
392
+ "22": 0.613,
393
+ "25": 0.88,
394
+ "26": 0.597
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.922,
398
+ "21": 0.842,
399
+ "22": 0.832,
400
+ "25": 0.863,
401
+ "26": 0.691
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.929,
405
+ "21": 0.853,
406
+ "22": 0.836,
407
+ "25": 0.891,
408
+ "26": 0.754
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9450000524520874,
414
+ "2": 0.9260000586509705,
415
+ "3": 0.921000063419342,
416
+ "5": 0.9200000166893005,
417
+ "6": 0.859000027179718
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.848,
449
+ "2": 0.65,
450
+ "3": 0.567,
451
+ "5": 0.772,
452
+ "6": 0.723
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.846,
456
+ "2": 0.732,
457
+ "3": 0.66,
458
+ "5": 0.792,
459
+ "6": 0.731
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.922,
463
+ "2": 0.859,
464
+ "3": 0.789,
465
+ "5": 0.865,
466
+ "6": 0.762
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9770000576972961,
472
+ "5.0": 0.9740000367164612
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.749,
492
+ "5.0": 0.749
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.807,
496
+ "5.0": 0.807
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.933,
500
+ "5.0": 0.933
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9470000267028809,
506
+ "Python": 0.9820000529289246,
507
+ "HTML": 0.9850000739097595,
508
+ "Java": 0.9650000333786011,
509
+ "PHP": 0.9600000381469727
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.618,
541
+ "Python": 0.622,
542
+ "HTML": 0.927,
543
+ "Java": 0.641,
544
+ "PHP": 0.605
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.855,
548
+ "Python": 0.661,
549
+ "HTML": 0.928,
550
+ "Java": 0.659,
551
+ "PHP": 0.877
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.874,
555
+ "Python": 0.913,
556
+ "HTML": 0.947,
557
+ "Java": 0.911,
558
+ "PHP": 0.909
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.940000057220459,
564
+ "1": 0.9900000691413879,
565
+ "2": 0.9340000152587891,
566
+ "3": 0.9520000219345093
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.827,
594
+ "1": 0.963,
595
+ "2": 0.808,
596
+ "3": 0.83
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.838,
600
+ "1": 0.973,
601
+ "2": 0.832,
602
+ "3": 0.864
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.865,
606
+ "1": 0.968,
607
+ "2": 0.875,
608
+ "3": 0.857
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 0.999000072479248,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.999000072479248
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.994,
650
+ "de": 0.882,
651
+ "es": 0.899,
652
+ "nl": 0.766
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.992,
657
+ "de": 0.888,
658
+ "es": 0.92,
659
+ "nl": 0.822
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.998,
663
+ "fr": 0.993,
664
+ "de": 0.896,
665
+ "es": 0.989,
666
+ "nl": 0.857
667
+ }
668
+ }
669
+ }
670
+ }
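For quick reference, the aggregate numbers in a results file like the one above can be read straight from the JSON. A minimal sketch, assuming the file has been downloaded locally; the trainer_2 path below is inferred from the naming pattern of the other files in this commit:

```python
import json

# Assumed local path, following the naming pattern of the files added in this commit.
path = (
    "sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/"
    "saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108"
    "_resid_post_layer_12_trainer_2_eval_results.json"
)

with open(path) as f:
    results = json.load(f)

# Aggregate probe accuracies averaged over all eight probing datasets.
llm = results["eval_result_metrics"]["llm"]
sae = results["eval_result_metrics"]["sae"]
print("full-activation probes:", llm["llm_test_accuracy"], "vs SAE:", sae["sae_test_accuracy"])
for k in (1, 2, 5):
    print(f"top-{k} probes:", llm[f"llm_top_{k}_test_accuracy"],
          "vs SAE:", sae[f"sae_top_{k}_test_accuracy"])
```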
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "40e62e78-d24b-44c6-abcc-60b9c93cf525",
30
+ "datetime_epoch_millis": 1737015185917,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9533875472843647,
44
+ "sae_top_1_test_accuracy": 0.7733562500000001,
45
+ "sae_top_2_test_accuracy": 0.82645,
46
+ "sae_top_5_test_accuracy": 0.8762062499999999,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9610000491142273,
65
+ "sae_top_1_test_accuracy": 0.788,
66
+ "sae_top_2_test_accuracy": 0.8238,
67
+ "sae_top_5_test_accuracy": 0.8904,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9512000441551208,
84
+ "sae_top_1_test_accuracy": 0.7636,
85
+ "sae_top_2_test_accuracy": 0.7906,
86
+ "sae_top_5_test_accuracy": 0.8552,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9232000350952149,
103
+ "sae_top_1_test_accuracy": 0.7777999999999999,
104
+ "sae_top_2_test_accuracy": 0.8062000000000001,
105
+ "sae_top_5_test_accuracy": 0.8455999999999999,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9056000471115112,
122
+ "sae_top_1_test_accuracy": 0.68,
123
+ "sae_top_2_test_accuracy": 0.7325999999999999,
124
+ "sae_top_5_test_accuracy": 0.8024000000000001,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.971000075340271,
141
+ "sae_top_1_test_accuracy": 0.757,
142
+ "sae_top_2_test_accuracy": 0.9,
143
+ "sae_top_5_test_accuracy": 0.932,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9676000475883484,
160
+ "sae_top_1_test_accuracy": 0.6732,
161
+ "sae_top_2_test_accuracy": 0.7777999999999998,
162
+ "sae_top_5_test_accuracy": 0.8572000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9495000541210175,
179
+ "sae_top_1_test_accuracy": 0.84325,
180
+ "sae_top_2_test_accuracy": 0.8579999999999999,
181
+ "sae_top_5_test_accuracy": 0.88125,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9980000257492065,
198
+ "sae_top_1_test_accuracy": 0.9040000000000001,
199
+ "sae_top_2_test_accuracy": 0.9226000000000001,
200
+ "sae_top_5_test_accuracy": 0.9456000000000001,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9410000443458557,
240
+ "1": 0.9530000686645508,
241
+ "2": 0.9510000348091125,
242
+ "6": 0.987000048160553,
243
+ "9": 0.9730000495910645
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.589,
275
+ "1": 0.62,
276
+ "2": 0.814,
277
+ "6": 0.977,
278
+ "9": 0.94
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.624,
282
+ "1": 0.672,
283
+ "2": 0.896,
284
+ "6": 0.979,
285
+ "9": 0.948
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.877,
289
+ "1": 0.745,
290
+ "2": 0.896,
291
+ "6": 0.98,
292
+ "9": 0.954
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9670000672340393,
298
+ "13": 0.9580000638961792,
299
+ "14": 0.9520000219345093,
300
+ "18": 0.9190000295639038,
301
+ "19": 0.9600000381469727
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.734,
333
+ "13": 0.687,
334
+ "14": 0.85,
335
+ "18": 0.706,
336
+ "19": 0.841
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.73,
340
+ "13": 0.797,
341
+ "14": 0.848,
342
+ "18": 0.73,
343
+ "19": 0.848
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.919,
347
+ "13": 0.791,
348
+ "14": 0.855,
349
+ "18": 0.854,
350
+ "19": 0.857
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.956000030040741,
356
+ "21": 0.9190000295639038,
357
+ "22": 0.9050000309944153,
358
+ "25": 0.9490000605583191,
359
+ "26": 0.8870000243186951
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.873,
391
+ "21": 0.829,
392
+ "22": 0.695,
393
+ "25": 0.883,
394
+ "26": 0.609
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.882,
398
+ "21": 0.843,
399
+ "22": 0.745,
400
+ "25": 0.854,
401
+ "26": 0.707
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.897,
405
+ "21": 0.852,
406
+ "22": 0.786,
407
+ "25": 0.919,
408
+ "26": 0.774
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.940000057220459,
414
+ "2": 0.9280000329017639,
415
+ "3": 0.9130000472068787,
416
+ "5": 0.9040000438690186,
417
+ "6": 0.843000054359436
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.841,
449
+ "2": 0.614,
450
+ "3": 0.631,
451
+ "5": 0.564,
452
+ "6": 0.75
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.844,
456
+ "2": 0.73,
457
+ "3": 0.718,
458
+ "5": 0.606,
459
+ "6": 0.765
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.921,
463
+ "2": 0.765,
464
+ "3": 0.774,
465
+ "5": 0.785,
466
+ "6": 0.767
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.971000075340271,
472
+ "5.0": 0.971000075340271
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.757,
492
+ "5.0": 0.757
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.9,
496
+ "5.0": 0.9
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.932,
500
+ "5.0": 0.932
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9500000476837158,
506
+ "Python": 0.9790000319480896,
507
+ "HTML": 0.984000027179718,
508
+ "Java": 0.9670000672340393,
509
+ "PHP": 0.9580000638961792
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.599,
541
+ "Python": 0.626,
542
+ "HTML": 0.913,
543
+ "Java": 0.628,
544
+ "PHP": 0.6
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.632,
548
+ "Python": 0.679,
549
+ "HTML": 0.917,
550
+ "Java": 0.768,
551
+ "PHP": 0.893
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.691,
555
+ "Python": 0.933,
556
+ "HTML": 0.947,
557
+ "Java": 0.801,
558
+ "PHP": 0.914
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9410000443458557,
564
+ "1": 0.987000048160553,
565
+ "2": 0.921000063419342,
566
+ "3": 0.9490000605583191
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.838,
594
+ "1": 0.948,
595
+ "2": 0.81,
596
+ "3": 0.777
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.84,
600
+ "1": 0.957,
601
+ "2": 0.843,
602
+ "3": 0.792
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.869,
606
+ "1": 0.97,
607
+ "2": 0.844,
608
+ "3": 0.842
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9980000257492065,
614
+ "fr": 0.9980000257492065,
615
+ "de": 0.9980000257492065,
616
+ "es": 0.9980000257492065,
617
+ "nl": 0.9980000257492065
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.992,
650
+ "de": 0.882,
651
+ "es": 0.894,
652
+ "nl": 0.752
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.995,
657
+ "de": 0.884,
658
+ "es": 0.905,
659
+ "nl": 0.83
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 1.0,
663
+ "fr": 0.991,
664
+ "de": 0.901,
665
+ "es": 0.996,
666
+ "nl": 0.84
667
+ }
668
+ }
669
+ }
670
+ }
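The dataset-level `sae_top_k` figures in `eval_result_details` are per-class means of the values stored under `eval_result_unstructured`. A short consistency check, assuming the trainer_3 file above is available locally at the path shown in this commit:

```python
import json
from statistics import mean

# Assumed local path for the trainer_3 results file added above.
path = (
    "sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/"
    "saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108"
    "_resid_post_layer_12_trainer_3_eval_results.json"
)

with open(path) as f:
    results = json.load(f)

# Each dataset-level sae_top_1 figure should equal the mean of the per-class
# figures for that dataset, up to floating-point rounding.
for detail in results["eval_result_details"]:
    name = detail["dataset_name"]
    per_class = results["eval_result_unstructured"][name]["sae_top_1_test_accuracy"]
    print(f"{name}: {detail['sae_top_1_test_accuracy']:.4f} "
          f"(per-class mean {mean(per_class.values()):.4f})")
```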
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "768cb6c1-1ad9-4643-8325-387f236171e1",
30
+ "datetime_epoch_millis": 1737015420221,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9507500395178794,
44
+ "sae_top_1_test_accuracy": 0.7770812500000002,
45
+ "sae_top_2_test_accuracy": 0.8184625,
46
+ "sae_top_5_test_accuracy": 0.8761312499999999,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9630000472068787,
65
+ "sae_top_1_test_accuracy": 0.7964,
66
+ "sae_top_2_test_accuracy": 0.8184000000000001,
67
+ "sae_top_5_test_accuracy": 0.9126,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9440000414848327,
84
+ "sae_top_1_test_accuracy": 0.7588,
85
+ "sae_top_2_test_accuracy": 0.7964,
86
+ "sae_top_5_test_accuracy": 0.8794000000000001,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9226000428199768,
103
+ "sae_top_1_test_accuracy": 0.762,
104
+ "sae_top_2_test_accuracy": 0.7844,
105
+ "sae_top_5_test_accuracy": 0.8219999999999998,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9040000438690186,
122
+ "sae_top_1_test_accuracy": 0.6838,
123
+ "sae_top_2_test_accuracy": 0.769,
124
+ "sae_top_5_test_accuracy": 0.8022,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9645000398159027,
141
+ "sae_top_1_test_accuracy": 0.772,
142
+ "sae_top_2_test_accuracy": 0.829,
143
+ "sae_top_5_test_accuracy": 0.943,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9646000266075134,
160
+ "sae_top_1_test_accuracy": 0.6759999999999999,
161
+ "sae_top_2_test_accuracy": 0.758,
162
+ "sae_top_5_test_accuracy": 0.8362,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9455000460147858,
179
+ "sae_top_1_test_accuracy": 0.83225,
180
+ "sae_top_2_test_accuracy": 0.8574999999999999,
181
+ "sae_top_5_test_accuracy": 0.87425,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9978000283241272,
198
+ "sae_top_1_test_accuracy": 0.9354000000000001,
199
+ "sae_top_2_test_accuracy": 0.9349999999999999,
200
+ "sae_top_5_test_accuracy": 0.9394,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9360000491142273,
240
+ "1": 0.9610000252723694,
241
+ "2": 0.9550000429153442,
242
+ "6": 0.987000048160553,
243
+ "9": 0.9760000705718994
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.621,
275
+ "1": 0.623,
276
+ "2": 0.82,
277
+ "6": 0.977,
278
+ "9": 0.941
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.641,
282
+ "1": 0.706,
283
+ "2": 0.829,
284
+ "6": 0.971,
285
+ "9": 0.945
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.839,
289
+ "1": 0.884,
290
+ "2": 0.919,
291
+ "6": 0.978,
292
+ "9": 0.943
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.956000030040741,
298
+ "13": 0.9510000348091125,
299
+ "14": 0.9490000605583191,
300
+ "18": 0.9150000214576721,
301
+ "19": 0.9490000605583191
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.731,
333
+ "13": 0.686,
334
+ "14": 0.859,
335
+ "18": 0.687,
336
+ "19": 0.831
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.851,
340
+ "13": 0.689,
341
+ "14": 0.876,
342
+ "18": 0.725,
343
+ "19": 0.841
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.934,
347
+ "13": 0.877,
348
+ "14": 0.875,
349
+ "18": 0.871,
350
+ "19": 0.84
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9540000557899475,
356
+ "21": 0.921000063419342,
357
+ "22": 0.9050000309944153,
358
+ "25": 0.956000030040741,
359
+ "26": 0.8770000338554382
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.812,
391
+ "21": 0.824,
392
+ "22": 0.606,
393
+ "25": 0.863,
394
+ "26": 0.705
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.878,
398
+ "21": 0.86,
399
+ "22": 0.606,
400
+ "25": 0.866,
401
+ "26": 0.712
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.899,
405
+ "21": 0.852,
406
+ "22": 0.682,
407
+ "25": 0.9,
408
+ "26": 0.777
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9410000443458557,
414
+ "2": 0.9180000424385071,
415
+ "3": 0.9070000648498535,
416
+ "5": 0.9130000472068787,
417
+ "6": 0.8410000205039978
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.826,
449
+ "2": 0.617,
450
+ "3": 0.682,
451
+ "5": 0.532,
452
+ "6": 0.762
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.835,
456
+ "2": 0.706,
457
+ "3": 0.728,
458
+ "5": 0.798,
459
+ "6": 0.778
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.915,
463
+ "2": 0.735,
464
+ "3": 0.785,
465
+ "5": 0.803,
466
+ "6": 0.773
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9650000333786011,
472
+ "5.0": 0.9640000462532043
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.772,
492
+ "5.0": 0.772
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.829,
496
+ "5.0": 0.829
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.943,
500
+ "5.0": 0.943
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9460000395774841,
506
+ "Python": 0.9790000319480896,
507
+ "HTML": 0.984000027179718,
508
+ "Java": 0.9570000171661377,
509
+ "PHP": 0.9570000171661377
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.612,
541
+ "Python": 0.623,
542
+ "HTML": 0.904,
543
+ "Java": 0.643,
544
+ "PHP": 0.598
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.624,
548
+ "Python": 0.664,
549
+ "HTML": 0.896,
550
+ "Java": 0.698,
551
+ "PHP": 0.908
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.677,
555
+ "Python": 0.943,
556
+ "HTML": 0.94,
557
+ "Java": 0.704,
558
+ "PHP": 0.917
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9350000619888306,
564
+ "1": 0.9810000658035278,
565
+ "2": 0.9320000410079956,
566
+ "3": 0.9340000152587891
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.786,
594
+ "1": 0.936,
595
+ "2": 0.808,
596
+ "3": 0.799
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.829,
600
+ "1": 0.955,
601
+ "2": 0.84,
602
+ "3": 0.806
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.853,
606
+ "1": 0.96,
607
+ "2": 0.847,
608
+ "3": 0.837
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 0.9980000257492065,
615
+ "de": 0.9950000643730164,
616
+ "es": 1.0,
617
+ "nl": 0.9960000514984131
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.998,
649
+ "fr": 0.997,
650
+ "de": 0.881,
651
+ "es": 0.964,
652
+ "nl": 0.837
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.995,
657
+ "de": 0.893,
658
+ "es": 0.963,
659
+ "nl": 0.824
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.995,
664
+ "de": 0.889,
665
+ "es": 0.967,
666
+ "nl": 0.847
667
+ }
668
+ }
669
+ }
670
+ }
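To compare the P-Anneal trainers against each other, the same fields can be collected across all of the `trainer_*` files in this folder. A sketch under the assumption that the files sit in the directory layout shown in this commit:

```python
import glob
import json

# Assumed directory layout matching the file names added in this commit.
pattern = (
    "sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/"
    "*_PAnneal_*_resid_post_layer_12_trainer_*_eval_results.json"
)

for path in sorted(glob.glob(pattern)):
    with open(path) as f:
        r = json.load(f)
    # The trainer index is the last underscore-separated token of the release id.
    trainer = r["sae_lens_release_id"].rsplit("_", 1)[-1]
    sae = r["eval_result_metrics"]["sae"]
    llm = r["eval_result_metrics"]["llm"]
    print(f"trainer {trainer}: sae_top_1={sae['sae_top_1_test_accuracy']:.3f}  "
          f"llm_top_1={llm['llm_top_1_test_accuracy']:.3f}  "
          f"sae_full={sae['sae_test_accuracy']:.3f}")
```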
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_PAnneal_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "070cb564-1c0f-4245-b2fd-c51e0ca628be",
30
+ "datetime_epoch_millis": 1737015653401,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9467937961220741,
44
+ "sae_top_1_test_accuracy": 0.7678625,
45
+ "sae_top_2_test_accuracy": 0.8060125,
46
+ "sae_top_5_test_accuracy": 0.85404375,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9622000455856323,
65
+ "sae_top_1_test_accuracy": 0.8096,
66
+ "sae_top_2_test_accuracy": 0.8232000000000002,
67
+ "sae_top_5_test_accuracy": 0.851,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9356000423431396,
84
+ "sae_top_1_test_accuracy": 0.7615999999999999,
85
+ "sae_top_2_test_accuracy": 0.7933999999999999,
86
+ "sae_top_5_test_accuracy": 0.8524,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9190000414848327,
103
+ "sae_top_1_test_accuracy": 0.7325999999999999,
104
+ "sae_top_2_test_accuracy": 0.7757999999999999,
105
+ "sae_top_5_test_accuracy": 0.8096,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9012000441551209,
122
+ "sae_top_1_test_accuracy": 0.7278,
123
+ "sae_top_2_test_accuracy": 0.765,
124
+ "sae_top_5_test_accuracy": 0.8123999999999999,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9595000445842743,
141
+ "sae_top_1_test_accuracy": 0.782,
142
+ "sae_top_2_test_accuracy": 0.83,
143
+ "sae_top_5_test_accuracy": 0.906,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9604000568389892,
160
+ "sae_top_1_test_accuracy": 0.6782,
161
+ "sae_top_2_test_accuracy": 0.7566,
162
+ "sae_top_5_test_accuracy": 0.8318000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9462500512599945,
179
+ "sae_top_1_test_accuracy": 0.7935,
180
+ "sae_top_2_test_accuracy": 0.8285,
181
+ "sae_top_5_test_accuracy": 0.86775,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9902000427246094,
198
+ "sae_top_1_test_accuracy": 0.8576,
199
+ "sae_top_2_test_accuracy": 0.8756,
200
+ "sae_top_5_test_accuracy": 0.9014,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_PAnnealTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "p_anneal",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9360000491142273,
240
+ "1": 0.9580000638961792,
241
+ "2": 0.9480000734329224,
242
+ "6": 0.9940000176429749,
243
+ "9": 0.9750000238418579
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.621,
275
+ "1": 0.621,
276
+ "2": 0.883,
277
+ "6": 0.979,
278
+ "9": 0.944
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.622,
282
+ "1": 0.697,
283
+ "2": 0.881,
284
+ "6": 0.974,
285
+ "9": 0.942
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.731,
289
+ "1": 0.716,
290
+ "2": 0.879,
291
+ "6": 0.98,
292
+ "9": 0.949
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.956000030040741,
298
+ "13": 0.9470000267028809,
299
+ "14": 0.9220000505447388,
300
+ "18": 0.8980000615119934,
301
+ "19": 0.9550000429153442
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.726,
333
+ "13": 0.686,
334
+ "14": 0.859,
335
+ "18": 0.696,
336
+ "19": 0.841
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.868,
340
+ "13": 0.671,
341
+ "14": 0.865,
342
+ "18": 0.719,
343
+ "19": 0.844
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.92,
347
+ "13": 0.748,
348
+ "14": 0.87,
349
+ "18": 0.878,
350
+ "19": 0.846
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9450000524520874,
356
+ "21": 0.9220000505447388,
357
+ "22": 0.9010000228881836,
358
+ "25": 0.9530000686645508,
359
+ "26": 0.8740000128746033
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.852,
391
+ "21": 0.608,
392
+ "22": 0.61,
393
+ "25": 0.866,
394
+ "26": 0.727
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.897,
398
+ "21": 0.78,
399
+ "22": 0.607,
400
+ "25": 0.871,
401
+ "26": 0.724
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.91,
405
+ "21": 0.792,
406
+ "22": 0.666,
407
+ "25": 0.902,
408
+ "26": 0.778
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9420000314712524,
414
+ "2": 0.9310000538825989,
415
+ "3": 0.8990000486373901,
416
+ "5": 0.8910000324249268,
417
+ "6": 0.843000054359436
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.819,
449
+ "2": 0.601,
450
+ "3": 0.686,
451
+ "5": 0.812,
452
+ "6": 0.721
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.855,
456
+ "2": 0.658,
457
+ "3": 0.754,
458
+ "5": 0.806,
459
+ "6": 0.752
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.901,
463
+ "2": 0.757,
464
+ "3": 0.802,
465
+ "5": 0.84,
466
+ "6": 0.762
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9600000381469727,
472
+ "5.0": 0.9590000510215759
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.782,
492
+ "5.0": 0.782
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.83,
496
+ "5.0": 0.83
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.906,
500
+ "5.0": 0.906
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9390000700950623,
506
+ "Python": 0.9730000495910645,
507
+ "HTML": 0.9850000739097595,
508
+ "Java": 0.956000030040741,
509
+ "PHP": 0.9490000605583191
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.622,
541
+ "Python": 0.608,
542
+ "HTML": 0.915,
543
+ "Java": 0.642,
544
+ "PHP": 0.604
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.633,
548
+ "Python": 0.672,
549
+ "HTML": 0.902,
550
+ "Java": 0.661,
551
+ "PHP": 0.915
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.696,
555
+ "Python": 0.92,
556
+ "HTML": 0.947,
557
+ "Java": 0.669,
558
+ "PHP": 0.927
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9320000410079956,
564
+ "1": 0.9810000658035278,
565
+ "2": 0.9260000586509705,
566
+ "3": 0.9460000395774841
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.758,
594
+ "1": 0.92,
595
+ "2": 0.714,
596
+ "3": 0.782
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.767,
600
+ "1": 0.942,
601
+ "2": 0.818,
602
+ "3": 0.787
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.874,
606
+ "1": 0.945,
607
+ "2": 0.833,
608
+ "3": 0.819
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9970000386238098,
614
+ "fr": 0.9850000739097595,
615
+ "de": 0.9940000176429749,
616
+ "es": 0.9880000352859497,
617
+ "nl": 0.987000048160553
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.998,
649
+ "fr": 0.747,
650
+ "de": 0.895,
651
+ "es": 0.906,
652
+ "nl": 0.742
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.772,
657
+ "de": 0.896,
658
+ "es": 0.892,
659
+ "nl": 0.818
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 1.0,
663
+ "fr": 0.87,
664
+ "de": 0.887,
665
+ "es": 0.9,
666
+ "nl": 0.85
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "f3c7e008-111a-4b86-b9af-19ba29046ce7",
30
+ "datetime_epoch_millis": 1737015866101,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9590375449508428,
44
+ "sae_top_1_test_accuracy": 0.795425,
45
+ "sae_top_2_test_accuracy": 0.8498,
46
+ "sae_top_5_test_accuracy": 0.88571875,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9678000450134278,
65
+ "sae_top_1_test_accuracy": 0.8092,
66
+ "sae_top_2_test_accuracy": 0.8348000000000001,
67
+ "sae_top_5_test_accuracy": 0.9103999999999999,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9540000438690186,
84
+ "sae_top_1_test_accuracy": 0.7924,
85
+ "sae_top_2_test_accuracy": 0.8426,
86
+ "sae_top_5_test_accuracy": 0.857,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9314000606536865,
103
+ "sae_top_1_test_accuracy": 0.7922,
104
+ "sae_top_2_test_accuracy": 0.833,
105
+ "sae_top_5_test_accuracy": 0.8615999999999999,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9218000411987305,
122
+ "sae_top_1_test_accuracy": 0.725,
123
+ "sae_top_2_test_accuracy": 0.7949999999999999,
124
+ "sae_top_5_test_accuracy": 0.8555999999999999,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9770000576972961,
141
+ "sae_top_1_test_accuracy": 0.746,
142
+ "sae_top_2_test_accuracy": 0.895,
143
+ "sae_top_5_test_accuracy": 0.932,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.968600046634674,
160
+ "sae_top_1_test_accuracy": 0.7792,
161
+ "sae_top_2_test_accuracy": 0.8248,
162
+ "sae_top_5_test_accuracy": 0.8828000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9525000303983688,
179
+ "sae_top_1_test_accuracy": 0.832,
180
+ "sae_top_2_test_accuracy": 0.873,
181
+ "sae_top_5_test_accuracy": 0.88575,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9992000341415406,
198
+ "sae_top_1_test_accuracy": 0.8874000000000001,
199
+ "sae_top_2_test_accuracy": 0.9002000000000001,
200
+ "sae_top_5_test_accuracy": 0.9006000000000001,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9500000476837158,
240
+ "1": 0.9640000462532043,
241
+ "2": 0.9570000171661377,
242
+ "6": 0.9900000691413879,
243
+ "9": 0.9780000448226929
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.628,
275
+ "1": 0.682,
276
+ "2": 0.808,
277
+ "6": 0.979,
278
+ "9": 0.949
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.605,
282
+ "1": 0.803,
283
+ "2": 0.834,
284
+ "6": 0.983,
285
+ "9": 0.949
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.82,
289
+ "1": 0.926,
290
+ "2": 0.871,
291
+ "6": 0.973,
292
+ "9": 0.962
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9670000672340393,
298
+ "13": 0.956000030040741,
299
+ "14": 0.9580000638961792,
300
+ "18": 0.9280000329017639,
301
+ "19": 0.9610000252723694
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.862,
333
+ "13": 0.769,
334
+ "14": 0.749,
335
+ "18": 0.725,
336
+ "19": 0.857
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.867,
340
+ "13": 0.792,
341
+ "14": 0.876,
342
+ "18": 0.828,
343
+ "19": 0.85
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.871,
347
+ "13": 0.783,
348
+ "14": 0.87,
349
+ "18": 0.859,
350
+ "19": 0.902
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9600000381469727,
356
+ "21": 0.9300000667572021,
357
+ "22": 0.9120000600814819,
358
+ "25": 0.9620000720024109,
359
+ "26": 0.893000066280365
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.877,
391
+ "21": 0.804,
392
+ "22": 0.7,
393
+ "25": 0.884,
394
+ "26": 0.696
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.879,
398
+ "21": 0.825,
399
+ "22": 0.807,
400
+ "25": 0.9,
401
+ "26": 0.754
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.914,
405
+ "21": 0.828,
406
+ "22": 0.808,
407
+ "25": 0.932,
408
+ "26": 0.826
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9440000653266907,
414
+ "2": 0.9350000619888306,
415
+ "3": 0.9240000247955322,
416
+ "5": 0.9330000281333923,
417
+ "6": 0.8730000257492065
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.849,
449
+ "2": 0.617,
450
+ "3": 0.663,
451
+ "5": 0.781,
452
+ "6": 0.715
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.911,
456
+ "2": 0.682,
457
+ "3": 0.763,
458
+ "5": 0.856,
459
+ "6": 0.763
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.926,
463
+ "2": 0.879,
464
+ "3": 0.833,
465
+ "5": 0.867,
466
+ "6": 0.773
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9770000576972961,
472
+ "5.0": 0.9770000576972961
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.746,
492
+ "5.0": 0.746
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.895,
496
+ "5.0": 0.895
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.932,
500
+ "5.0": 0.932
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9570000171661377,
506
+ "Python": 0.9850000739097595,
507
+ "HTML": 0.9820000529289246,
508
+ "Java": 0.9630000591278076,
509
+ "PHP": 0.956000030040741
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.621,
541
+ "Python": 0.887,
542
+ "HTML": 0.866,
543
+ "Java": 0.631,
544
+ "PHP": 0.891
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.836,
548
+ "Python": 0.888,
549
+ "HTML": 0.886,
550
+ "Java": 0.641,
551
+ "PHP": 0.873
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.861,
555
+ "Python": 0.929,
556
+ "HTML": 0.951,
557
+ "Java": 0.769,
558
+ "PHP": 0.904
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9340000152587891,
564
+ "1": 0.9860000610351562,
565
+ "2": 0.9340000152587891,
566
+ "3": 0.956000030040741
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.825,
594
+ "1": 0.839,
595
+ "2": 0.849,
596
+ "3": 0.815
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.851,
600
+ "1": 0.962,
601
+ "2": 0.848,
602
+ "3": 0.831
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.849,
606
+ "1": 0.958,
607
+ "2": 0.858,
608
+ "3": 0.878
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.999000072479248,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.9980000257492065
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.824,
650
+ "de": 0.886,
651
+ "es": 0.883,
652
+ "nl": 0.845
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.836,
657
+ "de": 0.895,
658
+ "es": 0.918,
659
+ "nl": 0.853
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.861,
664
+ "de": 0.894,
665
+ "es": 0.911,
666
+ "nl": 0.838
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "71c6dc04-cac3-46f3-8c0f-6c099d9af3c4",
30
+ "datetime_epoch_millis": 1737016084526,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9566687937825917,
44
+ "sae_top_1_test_accuracy": 0.7946687499999998,
45
+ "sae_top_2_test_accuracy": 0.8568187500000001,
46
+ "sae_top_5_test_accuracy": 0.894925,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9676000475883484,
65
+ "sae_top_1_test_accuracy": 0.8001999999999999,
66
+ "sae_top_2_test_accuracy": 0.8899999999999999,
67
+ "sae_top_5_test_accuracy": 0.9109999999999999,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9490000486373902,
84
+ "sae_top_1_test_accuracy": 0.8117999999999999,
85
+ "sae_top_2_test_accuracy": 0.8522000000000001,
86
+ "sae_top_5_test_accuracy": 0.8695999999999999,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9264000535011292,
103
+ "sae_top_1_test_accuracy": 0.7424,
104
+ "sae_top_2_test_accuracy": 0.796,
105
+ "sae_top_5_test_accuracy": 0.8378,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.914400041103363,
122
+ "sae_top_1_test_accuracy": 0.735,
123
+ "sae_top_2_test_accuracy": 0.805,
124
+ "sae_top_5_test_accuracy": 0.8530000000000001,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9755000472068787,
141
+ "sae_top_1_test_accuracy": 0.753,
142
+ "sae_top_2_test_accuracy": 0.893,
143
+ "sae_top_5_test_accuracy": 0.943,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9688000440597534,
160
+ "sae_top_1_test_accuracy": 0.7848,
161
+ "sae_top_2_test_accuracy": 0.836,
162
+ "sae_top_5_test_accuracy": 0.8826,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9522500485181808,
179
+ "sae_top_1_test_accuracy": 0.81875,
180
+ "sae_top_2_test_accuracy": 0.8647499999999999,
181
+ "sae_top_5_test_accuracy": 0.8969999999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9994000196456909,
198
+ "sae_top_1_test_accuracy": 0.9113999999999999,
199
+ "sae_top_2_test_accuracy": 0.9176,
200
+ "sae_top_5_test_accuracy": 0.9654,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9480000734329224,
240
+ "1": 0.9570000171661377,
241
+ "2": 0.9620000720024109,
242
+ "6": 0.9940000176429749,
243
+ "9": 0.9770000576972961
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.627,
275
+ "1": 0.673,
276
+ "2": 0.774,
277
+ "6": 0.977,
278
+ "9": 0.95
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.847,
282
+ "1": 0.806,
283
+ "2": 0.872,
284
+ "6": 0.981,
285
+ "9": 0.944
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.866,
289
+ "1": 0.881,
290
+ "2": 0.882,
291
+ "6": 0.98,
292
+ "9": 0.946
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9630000591278076,
298
+ "13": 0.9490000605583191,
299
+ "14": 0.9420000314712524,
300
+ "18": 0.9300000667572021,
301
+ "19": 0.9610000252723694
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.866,
333
+ "13": 0.767,
334
+ "14": 0.853,
335
+ "18": 0.731,
336
+ "19": 0.842
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.875,
340
+ "13": 0.806,
341
+ "14": 0.875,
342
+ "18": 0.833,
343
+ "19": 0.872
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.889,
347
+ "13": 0.831,
348
+ "14": 0.88,
349
+ "18": 0.86,
350
+ "19": 0.888
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9540000557899475,
356
+ "21": 0.9190000295639038,
357
+ "22": 0.9080000519752502,
358
+ "25": 0.9620000720024109,
359
+ "26": 0.8890000581741333
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.878,
391
+ "21": 0.647,
392
+ "22": 0.601,
393
+ "25": 0.888,
394
+ "26": 0.698
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.918,
398
+ "21": 0.685,
399
+ "22": 0.706,
400
+ "25": 0.897,
401
+ "26": 0.774
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.924,
405
+ "21": 0.799,
406
+ "22": 0.738,
407
+ "25": 0.925,
408
+ "26": 0.803
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9580000638961792,
414
+ "2": 0.9270000457763672,
415
+ "3": 0.906000018119812,
416
+ "5": 0.9170000553131104,
417
+ "6": 0.8640000224113464
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.838,
449
+ "2": 0.634,
450
+ "3": 0.684,
451
+ "5": 0.791,
452
+ "6": 0.728
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.907,
456
+ "2": 0.748,
457
+ "3": 0.756,
458
+ "5": 0.866,
459
+ "6": 0.748
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.927,
463
+ "2": 0.889,
464
+ "3": 0.809,
465
+ "5": 0.87,
466
+ "6": 0.77
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9750000238418579,
472
+ "5.0": 0.9760000705718994
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.753,
492
+ "5.0": 0.753
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.893,
496
+ "5.0": 0.893
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.943,
500
+ "5.0": 0.943
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.956000030040741,
506
+ "Python": 0.9820000529289246,
507
+ "HTML": 0.987000048160553,
508
+ "Java": 0.9630000591278076,
509
+ "PHP": 0.956000030040741
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.622,
541
+ "Python": 0.886,
542
+ "HTML": 0.881,
543
+ "Java": 0.649,
544
+ "PHP": 0.886
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.846,
548
+ "Python": 0.889,
549
+ "HTML": 0.908,
550
+ "Java": 0.652,
551
+ "PHP": 0.885
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.855,
555
+ "Python": 0.93,
556
+ "HTML": 0.948,
557
+ "Java": 0.785,
558
+ "PHP": 0.895
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9430000185966492,
564
+ "1": 0.9900000691413879,
565
+ "2": 0.9260000586509705,
566
+ "3": 0.9500000476837158
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.831,
594
+ "1": 0.947,
595
+ "2": 0.831,
596
+ "3": 0.666
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.831,
600
+ "1": 0.964,
601
+ "2": 0.852,
602
+ "3": 0.812
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.879,
606
+ "1": 0.968,
607
+ "2": 0.849,
608
+ "3": 0.892
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.9980000257492065
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.834,
650
+ "de": 0.887,
651
+ "es": 0.993,
652
+ "nl": 0.844
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.855,
657
+ "de": 0.887,
658
+ "es": 0.992,
659
+ "nl": 0.856
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.995,
664
+ "de": 0.891,
665
+ "es": 0.997,
666
+ "nl": 0.945
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "fbca94ae-5787-46dc-970b-d8570b6802e4",
30
+ "datetime_epoch_millis": 1737016293133,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9551562961190939,
44
+ "sae_top_1_test_accuracy": 0.7893312499999999,
45
+ "sae_top_2_test_accuracy": 0.8380375000000001,
46
+ "sae_top_5_test_accuracy": 0.8916499999999998,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9652000427246094,
65
+ "sae_top_1_test_accuracy": 0.7684000000000001,
66
+ "sae_top_2_test_accuracy": 0.8314,
67
+ "sae_top_5_test_accuracy": 0.9057999999999999,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9512000441551208,
84
+ "sae_top_1_test_accuracy": 0.7933999999999999,
85
+ "sae_top_2_test_accuracy": 0.8224,
86
+ "sae_top_5_test_accuracy": 0.8808,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9258000373840332,
103
+ "sae_top_1_test_accuracy": 0.7842,
104
+ "sae_top_2_test_accuracy": 0.8166,
105
+ "sae_top_5_test_accuracy": 0.8484,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9108000516891479,
122
+ "sae_top_1_test_accuracy": 0.736,
123
+ "sae_top_2_test_accuracy": 0.7852,
124
+ "sae_top_5_test_accuracy": 0.8488,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9725000560283661,
141
+ "sae_top_1_test_accuracy": 0.775,
142
+ "sae_top_2_test_accuracy": 0.824,
143
+ "sae_top_5_test_accuracy": 0.931,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.968600046634674,
160
+ "sae_top_1_test_accuracy": 0.7858,
161
+ "sae_top_2_test_accuracy": 0.8344000000000001,
162
+ "sae_top_5_test_accuracy": 0.8788,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9477500468492508,
179
+ "sae_top_1_test_accuracy": 0.8132499999999999,
180
+ "sae_top_2_test_accuracy": 0.8735,
181
+ "sae_top_5_test_accuracy": 0.8919999999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9994000434875489,
198
+ "sae_top_1_test_accuracy": 0.8586,
199
+ "sae_top_2_test_accuracy": 0.9168,
200
+ "sae_top_5_test_accuracy": 0.9475999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9450000524520874,
240
+ "1": 0.9550000429153442,
241
+ "2": 0.9530000686645508,
242
+ "6": 0.9940000176429749,
243
+ "9": 0.9790000319480896
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.625,
275
+ "1": 0.674,
276
+ "2": 0.64,
277
+ "6": 0.974,
278
+ "9": 0.929
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.608,
282
+ "1": 0.828,
283
+ "2": 0.807,
284
+ "6": 0.98,
285
+ "9": 0.934
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.839,
289
+ "1": 0.872,
290
+ "2": 0.876,
291
+ "6": 0.983,
292
+ "9": 0.959
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9590000510215759,
298
+ "13": 0.9630000591278076,
299
+ "14": 0.9520000219345093,
300
+ "18": 0.9290000200271606,
301
+ "19": 0.9530000686645508
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.727,
333
+ "13": 0.792,
334
+ "14": 0.872,
335
+ "18": 0.723,
336
+ "19": 0.853
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.867,
340
+ "13": 0.799,
341
+ "14": 0.882,
342
+ "18": 0.721,
343
+ "19": 0.843
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.926,
347
+ "13": 0.85,
348
+ "14": 0.872,
349
+ "18": 0.847,
350
+ "19": 0.909
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.956000030040741,
356
+ "21": 0.9220000505447388,
357
+ "22": 0.9080000519752502,
358
+ "25": 0.9600000381469727,
359
+ "26": 0.8830000162124634
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.878,
391
+ "21": 0.799,
392
+ "22": 0.676,
393
+ "25": 0.878,
394
+ "26": 0.69
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.887,
398
+ "21": 0.824,
399
+ "22": 0.716,
400
+ "25": 0.905,
401
+ "26": 0.751
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.917,
405
+ "21": 0.853,
406
+ "22": 0.706,
407
+ "25": 0.915,
408
+ "26": 0.851
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9450000524520874,
414
+ "2": 0.921000063419342,
415
+ "3": 0.9160000681877136,
416
+ "5": 0.9180000424385071,
417
+ "6": 0.8540000319480896
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.837,
449
+ "2": 0.639,
450
+ "3": 0.695,
451
+ "5": 0.78,
452
+ "6": 0.729
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.899,
456
+ "2": 0.744,
457
+ "3": 0.756,
458
+ "5": 0.799,
459
+ "6": 0.728
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.909,
463
+ "2": 0.878,
464
+ "3": 0.828,
465
+ "5": 0.86,
466
+ "6": 0.769
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9730000495910645,
472
+ "5.0": 0.9720000624656677
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.775,
492
+ "5.0": 0.775
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.824,
496
+ "5.0": 0.824
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.931,
500
+ "5.0": 0.931
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9420000314712524,
506
+ "Python": 0.9860000610351562,
507
+ "HTML": 0.987000048160553,
508
+ "Java": 0.9660000205039978,
509
+ "PHP": 0.9620000720024109
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.62,
541
+ "Python": 0.916,
542
+ "HTML": 0.85,
543
+ "Java": 0.65,
544
+ "PHP": 0.893
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.782,
548
+ "Python": 0.921,
549
+ "HTML": 0.908,
550
+ "Java": 0.656,
551
+ "PHP": 0.905
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.81,
555
+ "Python": 0.967,
556
+ "HTML": 0.94,
557
+ "Java": 0.782,
558
+ "PHP": 0.895
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9340000152587891,
564
+ "1": 0.9900000691413879,
565
+ "2": 0.9160000681877136,
566
+ "3": 0.9510000348091125
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.83,
594
+ "1": 0.938,
595
+ "2": 0.829,
596
+ "3": 0.656
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.846,
600
+ "1": 0.964,
601
+ "2": 0.845,
602
+ "3": 0.839
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.879,
606
+ "1": 0.968,
607
+ "2": 0.848,
608
+ "3": 0.873
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 0.999000072479248,
615
+ "de": 1.0,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.999000072479248
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.839,
650
+ "de": 0.893,
651
+ "es": 0.997,
652
+ "nl": 0.564
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.85,
657
+ "de": 0.888,
658
+ "es": 0.995,
659
+ "nl": 0.853
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.998,
663
+ "fr": 0.993,
664
+ "de": 0.888,
665
+ "es": 0.996,
666
+ "nl": 0.863
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "3e1bae8c-049d-4201-9c40-24b7e7d1773b",
30
+ "datetime_epoch_millis": 1737016501814,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9528187900781632,
44
+ "sae_top_1_test_accuracy": 0.8107500000000001,
45
+ "sae_top_2_test_accuracy": 0.8371,
46
+ "sae_top_5_test_accuracy": 0.8854562500000001,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9646000385284423,
65
+ "sae_top_1_test_accuracy": 0.8019999999999999,
66
+ "sae_top_2_test_accuracy": 0.8272,
67
+ "sae_top_5_test_accuracy": 0.8882,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9492000341415405,
84
+ "sae_top_1_test_accuracy": 0.7849999999999999,
85
+ "sae_top_2_test_accuracy": 0.8128,
86
+ "sae_top_5_test_accuracy": 0.8901999999999999,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9214000344276428,
103
+ "sae_top_1_test_accuracy": 0.8131999999999999,
104
+ "sae_top_2_test_accuracy": 0.8474,
105
+ "sae_top_5_test_accuracy": 0.8726,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9028000473976135,
122
+ "sae_top_1_test_accuracy": 0.7762,
123
+ "sae_top_2_test_accuracy": 0.8018000000000001,
124
+ "sae_top_5_test_accuracy": 0.8371999999999999,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9670000672340393,
141
+ "sae_top_1_test_accuracy": 0.782,
142
+ "sae_top_2_test_accuracy": 0.823,
143
+ "sae_top_5_test_accuracy": 0.932,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9684000253677368,
160
+ "sae_top_1_test_accuracy": 0.7894,
161
+ "sae_top_2_test_accuracy": 0.7938000000000001,
162
+ "sae_top_5_test_accuracy": 0.8452,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9517500400543213,
179
+ "sae_top_1_test_accuracy": 0.81,
180
+ "sae_top_2_test_accuracy": 0.845,
181
+ "sae_top_5_test_accuracy": 0.87425,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9974000334739686,
198
+ "sae_top_1_test_accuracy": 0.9282,
199
+ "sae_top_2_test_accuracy": 0.9458,
200
+ "sae_top_5_test_accuracy": 0.944,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9430000185966492,
240
+ "1": 0.9650000333786011,
241
+ "2": 0.9520000219345093,
242
+ "6": 0.9920000433921814,
243
+ "9": 0.971000075340271
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.603,
275
+ "1": 0.681,
276
+ "2": 0.809,
277
+ "6": 0.977,
278
+ "9": 0.94
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.722,
282
+ "1": 0.681,
283
+ "2": 0.823,
284
+ "6": 0.979,
285
+ "9": 0.931
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.861,
289
+ "1": 0.761,
290
+ "2": 0.886,
291
+ "6": 0.976,
292
+ "9": 0.957
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9660000205039978,
298
+ "13": 0.9510000348091125,
299
+ "14": 0.9500000476837158,
300
+ "18": 0.9230000376701355,
301
+ "19": 0.956000030040741
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.729,
333
+ "13": 0.76,
334
+ "14": 0.868,
335
+ "18": 0.725,
336
+ "19": 0.843
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.85,
340
+ "13": 0.782,
341
+ "14": 0.867,
342
+ "18": 0.725,
343
+ "19": 0.84
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.932,
347
+ "13": 0.859,
348
+ "14": 0.872,
349
+ "18": 0.886,
350
+ "19": 0.902
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9490000605583191,
356
+ "21": 0.9200000166893005,
357
+ "22": 0.9040000438690186,
358
+ "25": 0.9600000381469727,
359
+ "26": 0.8740000128746033
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.872,
391
+ "21": 0.76,
392
+ "22": 0.859,
393
+ "25": 0.883,
394
+ "26": 0.692
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.898,
398
+ "21": 0.79,
399
+ "22": 0.892,
400
+ "25": 0.895,
401
+ "26": 0.762
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.904,
405
+ "21": 0.814,
406
+ "22": 0.904,
407
+ "25": 0.921,
408
+ "26": 0.82
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9390000700950623,
414
+ "2": 0.9260000586509705,
415
+ "3": 0.9030000567436218,
416
+ "5": 0.9010000228881836,
417
+ "6": 0.8450000286102295
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.862,
449
+ "2": 0.793,
450
+ "3": 0.726,
451
+ "5": 0.795,
452
+ "6": 0.705
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.886,
456
+ "2": 0.813,
457
+ "3": 0.725,
458
+ "5": 0.835,
459
+ "6": 0.75
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.912,
463
+ "2": 0.838,
464
+ "3": 0.811,
465
+ "5": 0.861,
466
+ "6": 0.764
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9670000672340393,
472
+ "5.0": 0.9670000672340393
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.782,
492
+ "5.0": 0.782
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.823,
496
+ "5.0": 0.823
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.932,
500
+ "5.0": 0.932
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9570000171661377,
506
+ "Python": 0.9800000190734863,
507
+ "HTML": 0.9880000352859497,
508
+ "Java": 0.9610000252723694,
509
+ "PHP": 0.956000030040741
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.627,
541
+ "Python": 0.876,
542
+ "HTML": 0.876,
543
+ "Java": 0.669,
544
+ "PHP": 0.899
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.624,
548
+ "Python": 0.888,
549
+ "HTML": 0.9,
550
+ "Java": 0.66,
551
+ "PHP": 0.897
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.674,
555
+ "Python": 0.894,
556
+ "HTML": 0.956,
557
+ "Java": 0.784,
558
+ "PHP": 0.918
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9410000443458557,
564
+ "1": 0.984000027179718,
565
+ "2": 0.9320000410079956,
566
+ "3": 0.9500000476837158
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.849,
594
+ "1": 0.935,
595
+ "2": 0.723,
596
+ "3": 0.733
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.864,
600
+ "1": 0.954,
601
+ "2": 0.823,
602
+ "3": 0.739
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.879,
606
+ "1": 0.96,
607
+ "2": 0.831,
608
+ "3": 0.827
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9970000386238098,
614
+ "fr": 0.9970000386238098,
615
+ "de": 0.9980000257492065,
616
+ "es": 0.9980000257492065,
617
+ "nl": 0.9970000386238098
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.997,
649
+ "fr": 0.995,
650
+ "de": 0.896,
651
+ "es": 0.985,
652
+ "nl": 0.768
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.995,
657
+ "de": 0.896,
658
+ "es": 0.986,
659
+ "nl": 0.853
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.994,
664
+ "de": 0.893,
665
+ "es": 0.993,
666
+ "nl": 0.841
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "5473ac5b-1429-41e2-b5a5-98813d6ae44f",
30
+ "datetime_epoch_millis": 1737016707885,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9496312946081161,
44
+ "sae_top_1_test_accuracy": 0.8060125,
45
+ "sae_top_2_test_accuracy": 0.8362624999999999,
46
+ "sae_top_5_test_accuracy": 0.8849562500000001,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9606000423431397,
65
+ "sae_top_1_test_accuracy": 0.8173999999999999,
66
+ "sae_top_2_test_accuracy": 0.8291999999999999,
67
+ "sae_top_5_test_accuracy": 0.8932,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9398000478744507,
84
+ "sae_top_1_test_accuracy": 0.7926,
85
+ "sae_top_2_test_accuracy": 0.8177999999999999,
86
+ "sae_top_5_test_accuracy": 0.8879999999999999,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9206000566482544,
103
+ "sae_top_1_test_accuracy": 0.7984,
104
+ "sae_top_2_test_accuracy": 0.8486,
105
+ "sae_top_5_test_accuracy": 0.8620000000000001,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9040000319480896,
122
+ "sae_top_1_test_accuracy": 0.7312000000000001,
123
+ "sae_top_2_test_accuracy": 0.789,
124
+ "sae_top_5_test_accuracy": 0.8344000000000001,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9645000398159027,
141
+ "sae_top_1_test_accuracy": 0.78,
142
+ "sae_top_2_test_accuracy": 0.817,
143
+ "sae_top_5_test_accuracy": 0.925,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9618000507354736,
160
+ "sae_top_1_test_accuracy": 0.7946,
161
+ "sae_top_2_test_accuracy": 0.7988000000000001,
162
+ "sae_top_5_test_accuracy": 0.8442000000000001,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9487500488758087,
179
+ "sae_top_1_test_accuracy": 0.8175000000000001,
180
+ "sae_top_2_test_accuracy": 0.8545,
181
+ "sae_top_5_test_accuracy": 0.8802500000000001,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9970000386238098,
198
+ "sae_top_1_test_accuracy": 0.9164,
199
+ "sae_top_2_test_accuracy": 0.9352,
200
+ "sae_top_5_test_accuracy": 0.9526,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.940000057220459,
240
+ "1": 0.9500000476837158,
241
+ "2": 0.9510000348091125,
242
+ "6": 0.987000048160553,
243
+ "9": 0.9750000238418579
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.632,
275
+ "1": 0.659,
276
+ "2": 0.886,
277
+ "6": 0.979,
278
+ "9": 0.931
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.628,
282
+ "1": 0.713,
283
+ "2": 0.883,
284
+ "6": 0.98,
285
+ "9": 0.942
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.872,
289
+ "1": 0.767,
290
+ "2": 0.899,
291
+ "6": 0.978,
292
+ "9": 0.95
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9550000429153442,
298
+ "13": 0.9390000700950623,
299
+ "14": 0.9410000443458557,
300
+ "18": 0.9100000262260437,
301
+ "19": 0.9540000557899475
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.731,
333
+ "13": 0.783,
334
+ "14": 0.876,
335
+ "18": 0.729,
336
+ "19": 0.844
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.852,
340
+ "13": 0.783,
341
+ "14": 0.876,
342
+ "18": 0.728,
343
+ "19": 0.85
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.927,
347
+ "13": 0.864,
348
+ "14": 0.882,
349
+ "18": 0.864,
350
+ "19": 0.903
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9550000429153442,
356
+ "21": 0.9170000553131104,
357
+ "22": 0.9020000696182251,
358
+ "25": 0.9530000686645508,
359
+ "26": 0.8760000467300415
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.829,
391
+ "21": 0.736,
392
+ "22": 0.855,
393
+ "25": 0.872,
394
+ "26": 0.7
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.879,
398
+ "21": 0.772,
399
+ "22": 0.903,
400
+ "25": 0.904,
401
+ "26": 0.785
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.923,
405
+ "21": 0.797,
406
+ "22": 0.89,
407
+ "25": 0.904,
408
+ "26": 0.796
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9420000314712524,
414
+ "2": 0.9220000505447388,
415
+ "3": 0.9040000438690186,
416
+ "5": 0.9010000228881836,
417
+ "6": 0.8510000109672546
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.765,
449
+ "2": 0.61,
450
+ "3": 0.686,
451
+ "5": 0.87,
452
+ "6": 0.725
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.852,
456
+ "2": 0.76,
457
+ "3": 0.718,
458
+ "5": 0.878,
459
+ "6": 0.737
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.897,
463
+ "2": 0.802,
464
+ "3": 0.82,
465
+ "5": 0.883,
466
+ "6": 0.77
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9630000591278076,
472
+ "5.0": 0.9660000205039978
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.78,
492
+ "5.0": 0.78
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.817,
496
+ "5.0": 0.817
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.925,
500
+ "5.0": 0.925
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9420000314712524,
506
+ "Python": 0.971000075340271,
507
+ "HTML": 0.9860000610351562,
508
+ "Java": 0.9590000510215759,
509
+ "PHP": 0.9510000348091125
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.639,
541
+ "Python": 0.878,
542
+ "HTML": 0.899,
543
+ "Java": 0.647,
544
+ "PHP": 0.91
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.633,
548
+ "Python": 0.896,
549
+ "HTML": 0.899,
550
+ "Java": 0.652,
551
+ "PHP": 0.914
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.659,
555
+ "Python": 0.918,
556
+ "HTML": 0.951,
557
+ "Java": 0.771,
558
+ "PHP": 0.922
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9360000491142273,
564
+ "1": 0.9880000352859497,
565
+ "2": 0.9270000457763672,
566
+ "3": 0.9440000653266907
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.856,
594
+ "1": 0.818,
595
+ "2": 0.792,
596
+ "3": 0.804
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.866,
600
+ "1": 0.915,
601
+ "2": 0.847,
602
+ "3": 0.79
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.872,
606
+ "1": 0.954,
607
+ "2": 0.853,
608
+ "3": 0.842
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.999000072479248,
614
+ "fr": 1.0,
615
+ "de": 0.9970000386238098,
616
+ "es": 0.9980000257492065,
617
+ "nl": 0.9910000562667847
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.998,
649
+ "fr": 0.979,
650
+ "de": 0.824,
651
+ "es": 0.996,
652
+ "nl": 0.785
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.978,
657
+ "de": 0.92,
658
+ "es": 0.997,
659
+ "nl": 0.781
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.996,
664
+ "de": 0.942,
665
+ "es": 0.995,
666
+ "nl": 0.831
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_Standard_gemma-2-2b__0108_resid_post_layer_12_trainer_5_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "bcd98792-4b66-4313-913b-0f83b782f3eb",
30
+ "datetime_epoch_millis": 1737016911019,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9422062955796718,
44
+ "sae_top_1_test_accuracy": 0.72954375,
45
+ "sae_top_2_test_accuracy": 0.7786312499999999,
46
+ "sae_top_5_test_accuracy": 0.8480874999999999,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9550000429153442,
65
+ "sae_top_1_test_accuracy": 0.7577999999999999,
66
+ "sae_top_2_test_accuracy": 0.8106,
67
+ "sae_top_5_test_accuracy": 0.8378,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9332000494003296,
84
+ "sae_top_1_test_accuracy": 0.7536,
85
+ "sae_top_2_test_accuracy": 0.812,
86
+ "sae_top_5_test_accuracy": 0.8806,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9116000533103943,
103
+ "sae_top_1_test_accuracy": 0.8034000000000001,
104
+ "sae_top_2_test_accuracy": 0.8314,
105
+ "sae_top_5_test_accuracy": 0.8635999999999999,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.8948000431060791,
122
+ "sae_top_1_test_accuracy": 0.6846,
123
+ "sae_top_2_test_accuracy": 0.7862,
124
+ "sae_top_5_test_accuracy": 0.8350000000000002,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9565000236034393,
141
+ "sae_top_1_test_accuracy": 0.617,
142
+ "sae_top_2_test_accuracy": 0.692,
143
+ "sae_top_5_test_accuracy": 0.901,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9616000533103943,
160
+ "sae_top_1_test_accuracy": 0.7447999999999999,
161
+ "sae_top_2_test_accuracy": 0.755,
162
+ "sae_top_5_test_accuracy": 0.8016,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9457500576972961,
179
+ "sae_top_1_test_accuracy": 0.7247499999999999,
180
+ "sae_top_2_test_accuracy": 0.7912500000000001,
181
+ "sae_top_5_test_accuracy": 0.8835,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9792000412940979,
198
+ "sae_top_1_test_accuracy": 0.7504,
199
+ "sae_top_2_test_accuracy": 0.7505999999999999,
200
+ "sae_top_5_test_accuracy": 0.7816,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_StandardTrainerAprilUpdate_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_5",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "standard_april_update",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9290000200271606,
240
+ "1": 0.9500000476837158,
241
+ "2": 0.9450000524520874,
242
+ "6": 0.9820000529289246,
243
+ "9": 0.9690000414848328
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.615,
275
+ "1": 0.662,
276
+ "2": 0.601,
277
+ "6": 0.978,
278
+ "9": 0.933
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.628,
282
+ "1": 0.663,
283
+ "2": 0.855,
284
+ "6": 0.977,
285
+ "9": 0.93
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.727,
289
+ "1": 0.681,
290
+ "2": 0.86,
291
+ "6": 0.976,
292
+ "9": 0.945
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9480000734329224,
298
+ "13": 0.9360000491142273,
299
+ "14": 0.9270000457763672,
300
+ "18": 0.9030000567436218,
301
+ "19": 0.9520000219345093
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.728,
333
+ "13": 0.763,
334
+ "14": 0.705,
335
+ "18": 0.728,
336
+ "19": 0.844
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.86,
340
+ "13": 0.721,
341
+ "14": 0.901,
342
+ "18": 0.723,
343
+ "19": 0.855
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.863,
347
+ "13": 0.859,
348
+ "14": 0.899,
349
+ "18": 0.885,
350
+ "19": 0.897
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9440000653266907,
356
+ "21": 0.8980000615119934,
357
+ "22": 0.9020000696182251,
358
+ "25": 0.9510000348091125,
359
+ "26": 0.8630000352859497
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.794,
391
+ "21": 0.754,
392
+ "22": 0.884,
393
+ "25": 0.869,
394
+ "26": 0.716
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.871,
398
+ "21": 0.755,
399
+ "22": 0.89,
400
+ "25": 0.875,
401
+ "26": 0.766
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.905,
405
+ "21": 0.832,
406
+ "22": 0.88,
407
+ "25": 0.917,
408
+ "26": 0.784
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9330000281333923,
414
+ "2": 0.921000063419342,
415
+ "3": 0.8950000405311584,
416
+ "5": 0.8920000195503235,
417
+ "6": 0.8330000638961792
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.726,
449
+ "2": 0.63,
450
+ "3": 0.623,
451
+ "5": 0.743,
452
+ "6": 0.701
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.796,
456
+ "2": 0.838,
457
+ "3": 0.715,
458
+ "5": 0.862,
459
+ "6": 0.72
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.9,
463
+ "2": 0.853,
464
+ "3": 0.786,
465
+ "5": 0.893,
466
+ "6": 0.743
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.956000030040741,
472
+ "5.0": 0.9570000171661377
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.617,
492
+ "5.0": 0.617
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.692,
496
+ "5.0": 0.692
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.901,
500
+ "5.0": 0.901
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9420000314712524,
506
+ "Python": 0.9740000367164612,
507
+ "HTML": 0.9850000739097595,
508
+ "Java": 0.9580000638961792,
509
+ "PHP": 0.9490000605583191
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.627,
541
+ "Python": 0.617,
542
+ "HTML": 0.899,
543
+ "Java": 0.674,
544
+ "PHP": 0.907
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.629,
548
+ "Python": 0.704,
549
+ "HTML": 0.902,
550
+ "Java": 0.632,
551
+ "PHP": 0.908
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.667,
555
+ "Python": 0.744,
556
+ "HTML": 0.94,
557
+ "Java": 0.745,
558
+ "PHP": 0.912
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9300000667572021,
564
+ "1": 0.9800000190734863,
565
+ "2": 0.9250000715255737,
566
+ "3": 0.9480000734329224
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.731,
594
+ "1": 0.82,
595
+ "2": 0.714,
596
+ "3": 0.634
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.828,
600
+ "1": 0.913,
601
+ "2": 0.776,
602
+ "3": 0.648
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.884,
606
+ "1": 0.939,
607
+ "2": 0.836,
608
+ "3": 0.875
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9980000257492065,
614
+ "fr": 0.9660000205039978,
615
+ "de": 0.9780000448226929,
616
+ "es": 0.9810000658035278,
617
+ "nl": 0.9730000495910645
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.558,
650
+ "de": 0.83,
651
+ "es": 0.579,
652
+ "nl": 0.785
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.576,
657
+ "de": 0.834,
658
+ "es": 0.562,
659
+ "nl": 0.783
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.998,
663
+ "fr": 0.633,
664
+ "de": 0.847,
665
+ "es": 0.629,
666
+ "nl": 0.801
667
+ }
668
+ }
669
+ }
670
+ }
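
Every `*_eval_results.json` in this commit follows the same schema visible above: an `eval_config` block, headline `eval_result_metrics` split into an `llm` and an `sae` section, a per-dataset `eval_result_details` list, and a per-class `eval_result_unstructured` block. The sketch below shows one way to read a single file and print its headline sparse-probing numbers. It is a minimal sketch, assuming the repository has been downloaded locally so the JSONs sit under the `sparse_probing/...` paths shown in this diff; the concrete path used here is just one of the files added in this commit, not a canonical entry point.

```python
import json
from pathlib import Path

# Assumed local layout mirroring the paths in this diff; adjust if your copy lives elsewhere.
path = Path(
    "sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/"
    "saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_"
    "resid_post_layer_12_trainer_0_eval_results.json"
)

with path.open() as f:
    result = json.load(f)

# Headline metrics: LLM residual-stream probe accuracy vs. SAE probe accuracy.
llm = result["eval_result_metrics"]["llm"]
sae = result["eval_result_metrics"]["sae"]
print(result["sae_lens_release_id"])
print(f"  llm_test_accuracy : {llm['llm_test_accuracy']:.4f}")
print(f"  sae_test_accuracy : {sae['sae_test_accuracy']:.4f}")
for k in (1, 2, 5):  # the k_values listed in eval_config
    print(f"  sae_top_{k}_test_accuracy: {sae[f'sae_top_{k}_test_accuracy']:.4f}")

# Per-dataset breakdown: eval_result_details is a list of dicts keyed by dataset_name.
for detail in result["eval_result_details"]:
    print(f"  {detail['dataset_name']}: sae_top_1={detail['sae_top_1_test_accuracy']:.3f}")
```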
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_0_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "401e4c6b-f199-4156-9c38-8a9701e93d7c",
30
+ "datetime_epoch_millis": 1737017152537,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9460937969386578,
44
+ "sae_top_1_test_accuracy": 0.7426125,
45
+ "sae_top_2_test_accuracy": 0.8056249999999999,
46
+ "sae_top_5_test_accuracy": 0.86946875,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9552000641822815,
65
+ "sae_top_1_test_accuracy": 0.8022,
66
+ "sae_top_2_test_accuracy": 0.8192,
67
+ "sae_top_5_test_accuracy": 0.8728000000000001,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9402000427246093,
84
+ "sae_top_1_test_accuracy": 0.7632,
85
+ "sae_top_2_test_accuracy": 0.7689999999999999,
86
+ "sae_top_5_test_accuracy": 0.8304,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9194000363349915,
103
+ "sae_top_1_test_accuracy": 0.8128,
104
+ "sae_top_2_test_accuracy": 0.8385999999999999,
105
+ "sae_top_5_test_accuracy": 0.8619999999999999,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.8964000344276428,
122
+ "sae_top_1_test_accuracy": 0.6274000000000001,
123
+ "sae_top_2_test_accuracy": 0.6878,
124
+ "sae_top_5_test_accuracy": 0.8140000000000001,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9590000510215759,
141
+ "sae_top_1_test_accuracy": 0.662,
142
+ "sae_top_2_test_accuracy": 0.835,
143
+ "sae_top_5_test_accuracy": 0.933,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.96500004529953,
160
+ "sae_top_1_test_accuracy": 0.6572,
161
+ "sae_top_2_test_accuracy": 0.8075999999999999,
162
+ "sae_top_5_test_accuracy": 0.8565999999999999,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9437500536441803,
179
+ "sae_top_1_test_accuracy": 0.7464999999999999,
180
+ "sae_top_2_test_accuracy": 0.781,
181
+ "sae_top_5_test_accuracy": 0.8407499999999999,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9898000478744506,
198
+ "sae_top_1_test_accuracy": 0.8695999999999999,
199
+ "sae_top_2_test_accuracy": 0.9067999999999999,
200
+ "sae_top_5_test_accuracy": 0.9461999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_0",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9300000667572021,
240
+ "1": 0.9490000605583191,
241
+ "2": 0.9490000605583191,
242
+ "6": 0.9860000610351562,
243
+ "9": 0.9620000720024109
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.577,
275
+ "1": 0.623,
276
+ "2": 0.892,
277
+ "6": 0.98,
278
+ "9": 0.939
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.63,
282
+ "1": 0.672,
283
+ "2": 0.878,
284
+ "6": 0.977,
285
+ "9": 0.939
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.776,
289
+ "1": 0.746,
290
+ "2": 0.904,
291
+ "6": 0.99,
292
+ "9": 0.948
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9520000219345093,
298
+ "13": 0.9540000557899475,
299
+ "14": 0.9500000476837158,
300
+ "18": 0.8980000615119934,
301
+ "19": 0.9470000267028809
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.731,
333
+ "13": 0.685,
334
+ "14": 0.868,
335
+ "18": 0.699,
336
+ "19": 0.833
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.737,
340
+ "13": 0.671,
341
+ "14": 0.876,
342
+ "18": 0.723,
343
+ "19": 0.838
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.947,
347
+ "13": 0.738,
348
+ "14": 0.87,
349
+ "18": 0.739,
350
+ "19": 0.858
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9430000185966492,
356
+ "21": 0.9100000262260437,
357
+ "22": 0.9040000438690186,
358
+ "25": 0.9520000219345093,
359
+ "26": 0.8880000710487366
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.863,
391
+ "21": 0.818,
392
+ "22": 0.883,
393
+ "25": 0.875,
394
+ "26": 0.625
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.877,
398
+ "21": 0.842,
399
+ "22": 0.885,
400
+ "25": 0.872,
401
+ "26": 0.717
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.921,
405
+ "21": 0.85,
406
+ "22": 0.886,
407
+ "25": 0.885,
408
+ "26": 0.768
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.937000036239624,
414
+ "2": 0.909000039100647,
415
+ "3": 0.9000000357627869,
416
+ "5": 0.8870000243186951,
417
+ "6": 0.8490000367164612
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.803,
449
+ "2": 0.584,
450
+ "3": 0.587,
451
+ "5": 0.556,
452
+ "6": 0.607
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.814,
456
+ "2": 0.631,
457
+ "3": 0.694,
458
+ "5": 0.566,
459
+ "6": 0.734
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.889,
463
+ "2": 0.797,
464
+ "3": 0.748,
465
+ "5": 0.868,
466
+ "6": 0.768
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9590000510215759,
472
+ "5.0": 0.9590000510215759
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.662,
492
+ "5.0": 0.662
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.835,
496
+ "5.0": 0.835
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.933,
500
+ "5.0": 0.933
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9520000219345093,
506
+ "Python": 0.9770000576972961,
507
+ "HTML": 0.9880000352859497,
508
+ "Java": 0.9590000510215759,
509
+ "PHP": 0.9490000605583191
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.531,
541
+ "Python": 0.624,
542
+ "HTML": 0.901,
543
+ "Java": 0.632,
544
+ "PHP": 0.598
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.621,
548
+ "Python": 0.9,
549
+ "HTML": 0.909,
550
+ "Java": 0.691,
551
+ "PHP": 0.917
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.727,
555
+ "Python": 0.91,
556
+ "HTML": 0.953,
557
+ "Java": 0.775,
558
+ "PHP": 0.918
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.937000036239624,
564
+ "1": 0.9810000658035278,
565
+ "2": 0.9160000681877136,
566
+ "3": 0.9410000443458557
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.74,
594
+ "1": 0.833,
595
+ "2": 0.778,
596
+ "3": 0.635
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.776,
600
+ "1": 0.84,
601
+ "2": 0.835,
602
+ "3": 0.673
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.844,
606
+ "1": 0.904,
607
+ "2": 0.831,
608
+ "3": 0.784
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9980000257492065,
614
+ "fr": 0.9960000514984131,
615
+ "de": 0.9810000658035278,
616
+ "es": 0.9920000433921814,
617
+ "nl": 0.9820000529289246
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.971,
650
+ "de": 0.886,
651
+ "es": 0.734,
652
+ "nl": 0.757
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.985,
657
+ "de": 0.908,
658
+ "es": 0.896,
659
+ "nl": 0.746
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.998,
663
+ "fr": 0.993,
664
+ "de": 0.918,
665
+ "es": 0.981,
666
+ "nl": 0.841
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_1_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "196b15b8-cb5c-4b6e-ba9f-90eb60e8bc96",
30
+ "datetime_epoch_millis": 1737017404608,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9507812939584256,
44
+ "sae_top_1_test_accuracy": 0.74349375,
45
+ "sae_top_2_test_accuracy": 0.8073125,
46
+ "sae_top_5_test_accuracy": 0.8753875,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9606000304222106,
65
+ "sae_top_1_test_accuracy": 0.7778,
66
+ "sae_top_2_test_accuracy": 0.8244,
67
+ "sae_top_5_test_accuracy": 0.8687999999999999,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9438000440597534,
84
+ "sae_top_1_test_accuracy": 0.7367999999999999,
85
+ "sae_top_2_test_accuracy": 0.7623999999999999,
86
+ "sae_top_5_test_accuracy": 0.8648,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9222000479698181,
103
+ "sae_top_1_test_accuracy": 0.8126,
104
+ "sae_top_2_test_accuracy": 0.8382,
105
+ "sae_top_5_test_accuracy": 0.8635999999999999,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9088000416755676,
122
+ "sae_top_1_test_accuracy": 0.6832,
123
+ "sae_top_2_test_accuracy": 0.7404,
124
+ "sae_top_5_test_accuracy": 0.8333999999999999,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9595000445842743,
141
+ "sae_top_1_test_accuracy": 0.654,
142
+ "sae_top_2_test_accuracy": 0.797,
143
+ "sae_top_5_test_accuracy": 0.902,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9638000369071961,
160
+ "sae_top_1_test_accuracy": 0.6573999999999999,
161
+ "sae_top_2_test_accuracy": 0.8016,
162
+ "sae_top_5_test_accuracy": 0.8622,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9507500529289246,
179
+ "sae_top_1_test_accuracy": 0.72875,
180
+ "sae_top_2_test_accuracy": 0.7935,
181
+ "sae_top_5_test_accuracy": 0.8575,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9968000531196595,
198
+ "sae_top_1_test_accuracy": 0.8974,
199
+ "sae_top_2_test_accuracy": 0.901,
200
+ "sae_top_5_test_accuracy": 0.9507999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_1",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9420000314712524,
240
+ "1": 0.9600000381469727,
241
+ "2": 0.9520000219345093,
242
+ "6": 0.9880000352859497,
243
+ "9": 0.9610000252723694
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.586,
275
+ "1": 0.64,
276
+ "2": 0.889,
277
+ "6": 0.833,
278
+ "9": 0.941
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.665,
282
+ "1": 0.631,
283
+ "2": 0.9,
284
+ "6": 0.98,
285
+ "9": 0.946
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.796,
289
+ "1": 0.732,
290
+ "2": 0.892,
291
+ "6": 0.976,
292
+ "9": 0.948
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9610000252723694,
298
+ "13": 0.9450000524520874,
299
+ "14": 0.9410000443458557,
300
+ "18": 0.9160000681877136,
301
+ "19": 0.956000030040741
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.597,
333
+ "13": 0.683,
334
+ "14": 0.863,
335
+ "18": 0.697,
336
+ "19": 0.844
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.733,
340
+ "13": 0.672,
341
+ "14": 0.863,
342
+ "18": 0.703,
343
+ "19": 0.841
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.936,
347
+ "13": 0.806,
348
+ "14": 0.864,
349
+ "18": 0.872,
350
+ "19": 0.846
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9550000429153442,
356
+ "21": 0.9180000424385071,
357
+ "22": 0.9010000228881836,
358
+ "25": 0.9530000686645508,
359
+ "26": 0.8840000629425049
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.867,
391
+ "21": 0.816,
392
+ "22": 0.887,
393
+ "25": 0.871,
394
+ "26": 0.622
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.901,
398
+ "21": 0.807,
399
+ "22": 0.887,
400
+ "25": 0.876,
401
+ "26": 0.72
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.929,
405
+ "21": 0.834,
406
+ "22": 0.891,
407
+ "25": 0.899,
408
+ "26": 0.765
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9450000524520874,
414
+ "2": 0.9220000505447388,
415
+ "3": 0.9190000295639038,
416
+ "5": 0.8980000615119934,
417
+ "6": 0.8600000143051147
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.824,
449
+ "2": 0.623,
450
+ "3": 0.617,
451
+ "5": 0.609,
452
+ "6": 0.743
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.846,
456
+ "2": 0.865,
457
+ "3": 0.615,
458
+ "5": 0.605,
459
+ "6": 0.771
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.908,
463
+ "2": 0.87,
464
+ "3": 0.764,
465
+ "5": 0.847,
466
+ "6": 0.778
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9600000381469727,
472
+ "5.0": 0.9590000510215759
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.654,
492
+ "5.0": 0.654
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.797,
496
+ "5.0": 0.797
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.902,
500
+ "5.0": 0.902
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9420000314712524,
506
+ "Python": 0.9750000238418579,
507
+ "HTML": 0.987000048160553,
508
+ "Java": 0.9610000252723694,
509
+ "PHP": 0.9540000557899475
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.527,
541
+ "Python": 0.631,
542
+ "HTML": 0.897,
543
+ "Java": 0.634,
544
+ "PHP": 0.598
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.634,
548
+ "Python": 0.915,
549
+ "HTML": 0.896,
550
+ "Java": 0.651,
551
+ "PHP": 0.912
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.626,
555
+ "Python": 0.944,
556
+ "HTML": 0.922,
557
+ "Java": 0.898,
558
+ "PHP": 0.921
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9390000700950623,
564
+ "1": 0.9810000658035278,
565
+ "2": 0.940000057220459,
566
+ "3": 0.9430000185966492
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.708,
594
+ "1": 0.833,
595
+ "2": 0.751,
596
+ "3": 0.623
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.774,
600
+ "1": 0.92,
601
+ "2": 0.834,
602
+ "3": 0.646
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.839,
606
+ "1": 0.937,
607
+ "2": 0.84,
608
+ "3": 0.814
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.999000072479248,
614
+ "fr": 1.0,
615
+ "de": 0.9910000562667847,
616
+ "es": 0.999000072479248,
617
+ "nl": 0.9950000643730164
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.99,
650
+ "de": 0.877,
651
+ "es": 0.87,
652
+ "nl": 0.751
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.998,
656
+ "fr": 0.994,
657
+ "de": 0.883,
658
+ "es": 0.878,
659
+ "nl": 0.752
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.997,
663
+ "fr": 0.997,
664
+ "de": 0.891,
665
+ "es": 0.992,
666
+ "nl": 0.877
667
+ }
668
+ }
669
+ }
670
+ }
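
The files in this folder differ only in which SAE checkpoint was evaluated, which is recorded in `sae_lens_release_id` and in `sae_cfg_dict` (notably `architecture` and `d_sae`). A hedged sketch for comparing checkpoints follows: it assumes the same local directory layout as above and a `*_eval_results.json` glob pattern inferred from the file names in this diff, and simply tabulates top-1 sparse-probing accuracy against the LLM baseline.

```python
import json
from pathlib import Path

# Assumed local copy of the result files added in this commit.
result_dir = Path("sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108")

rows = []
for path in sorted(result_dir.glob("*_eval_results.json")):
    data = json.loads(path.read_text())
    rows.append(
        {
            "release_id": data["sae_lens_release_id"],
            "architecture": data["sae_cfg_dict"]["architecture"],
            "d_sae": data["sae_cfg_dict"]["d_sae"],
            "sae_top_1": data["eval_result_metrics"]["sae"]["sae_top_1_test_accuracy"],
            "llm_top_1": data["eval_result_metrics"]["llm"]["llm_top_1_test_accuracy"],
        }
    )

# Sort by top-1 sparse-probing accuracy so the strongest checkpoints come first.
for row in sorted(rows, key=lambda r: r["sae_top_1"], reverse=True):
    print(
        f"sae_top_1={row['sae_top_1']:.4f} "
        f"(llm_top_1={row['llm_top_1']:.4f}) "
        f"d_sae={row['d_sae']} {row['architecture']}: {row['release_id']}"
    )
```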
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_2_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "3e7eb5c9-3c5f-493a-8e0c-799fa1c49ebb",
30
+ "datetime_epoch_millis": 1737017648703,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9539000429213047,
44
+ "sae_top_1_test_accuracy": 0.7528062499999999,
45
+ "sae_top_2_test_accuracy": 0.80405,
46
+ "sae_top_5_test_accuracy": 0.8890125,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.9662000417709351,
65
+ "sae_top_1_test_accuracy": 0.7726,
66
+ "sae_top_2_test_accuracy": 0.8116000000000001,
67
+ "sae_top_5_test_accuracy": 0.8972000000000001,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9454000353813171,
84
+ "sae_top_1_test_accuracy": 0.7661999999999999,
85
+ "sae_top_2_test_accuracy": 0.7632000000000001,
86
+ "sae_top_5_test_accuracy": 0.8596,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9244000554084778,
103
+ "sae_top_1_test_accuracy": 0.8113999999999999,
104
+ "sae_top_2_test_accuracy": 0.8261999999999998,
105
+ "sae_top_5_test_accuracy": 0.8666,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9130000352859498,
122
+ "sae_top_1_test_accuracy": 0.679,
123
+ "sae_top_2_test_accuracy": 0.7300000000000001,
124
+ "sae_top_5_test_accuracy": 0.8092,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9690000414848328,
141
+ "sae_top_1_test_accuracy": 0.651,
142
+ "sae_top_2_test_accuracy": 0.769,
143
+ "sae_top_5_test_accuracy": 0.931,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.966800057888031,
160
+ "sae_top_1_test_accuracy": 0.6736,
161
+ "sae_top_2_test_accuracy": 0.7978000000000002,
162
+ "sae_top_5_test_accuracy": 0.9158,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9490000307559967,
179
+ "sae_top_1_test_accuracy": 0.75425,
180
+ "sae_top_2_test_accuracy": 0.802,
181
+ "sae_top_5_test_accuracy": 0.8705,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9974000453948975,
198
+ "sae_top_1_test_accuracy": 0.9144,
199
+ "sae_top_2_test_accuracy": 0.9326000000000001,
200
+ "sae_top_5_test_accuracy": 0.9621999999999999,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_2",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9460000395774841,
240
+ "1": 0.9670000672340393,
241
+ "2": 0.956000030040741,
242
+ "6": 0.9880000352859497,
243
+ "9": 0.9740000367164612
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.586,
275
+ "1": 0.63,
276
+ "2": 0.886,
277
+ "6": 0.827,
278
+ "9": 0.934
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.624,
282
+ "1": 0.629,
283
+ "2": 0.889,
284
+ "6": 0.974,
285
+ "9": 0.942
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.86,
289
+ "1": 0.8,
290
+ "2": 0.898,
291
+ "6": 0.979,
292
+ "9": 0.949
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9600000381469727,
298
+ "13": 0.9520000219345093,
299
+ "14": 0.9430000185966492,
300
+ "18": 0.9190000295639038,
301
+ "19": 0.9530000686645508
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.729,
333
+ "13": 0.683,
334
+ "14": 0.871,
335
+ "18": 0.703,
336
+ "19": 0.845
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.735,
340
+ "13": 0.667,
341
+ "14": 0.863,
342
+ "18": 0.696,
343
+ "19": 0.855
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.944,
347
+ "13": 0.749,
348
+ "14": 0.887,
349
+ "18": 0.868,
350
+ "19": 0.85
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9510000348091125,
356
+ "21": 0.9250000715255737,
357
+ "22": 0.9080000519752502,
358
+ "25": 0.9580000638961792,
359
+ "26": 0.8800000548362732
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.873,
391
+ "21": 0.831,
392
+ "22": 0.866,
393
+ "25": 0.872,
394
+ "26": 0.615
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.884,
398
+ "21": 0.819,
399
+ "22": 0.857,
400
+ "25": 0.861,
401
+ "26": 0.71
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.912,
405
+ "21": 0.85,
406
+ "22": 0.898,
407
+ "25": 0.901,
408
+ "26": 0.772
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9420000314712524,
414
+ "2": 0.9290000200271606,
415
+ "3": 0.9190000295639038,
416
+ "5": 0.9110000729560852,
417
+ "6": 0.8640000224113464
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.84,
449
+ "2": 0.626,
450
+ "3": 0.574,
451
+ "5": 0.652,
452
+ "6": 0.703
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.854,
456
+ "2": 0.856,
457
+ "3": 0.585,
458
+ "5": 0.668,
459
+ "6": 0.687
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.898,
463
+ "2": 0.88,
464
+ "3": 0.72,
465
+ "5": 0.782,
466
+ "6": 0.766
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.968000054359436,
472
+ "5.0": 0.9700000286102295
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.651,
492
+ "5.0": 0.651
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.769,
496
+ "5.0": 0.769
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.931,
500
+ "5.0": 0.931
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.956000030040741,
506
+ "Python": 0.9770000576972961,
507
+ "HTML": 0.9900000691413879,
508
+ "Java": 0.9620000720024109,
509
+ "PHP": 0.9490000605583191
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.618,
541
+ "Python": 0.628,
542
+ "HTML": 0.899,
543
+ "Java": 0.629,
544
+ "PHP": 0.594
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.625,
548
+ "Python": 0.926,
549
+ "HTML": 0.905,
550
+ "Java": 0.647,
551
+ "PHP": 0.886
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.864,
555
+ "Python": 0.948,
556
+ "HTML": 0.947,
557
+ "Java": 0.911,
558
+ "PHP": 0.909
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9430000185966492,
564
+ "1": 0.9810000658035278,
565
+ "2": 0.9290000200271606,
566
+ "3": 0.9430000185966492
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.76,
594
+ "1": 0.881,
595
+ "2": 0.746,
596
+ "3": 0.63
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.814,
600
+ "1": 0.926,
601
+ "2": 0.832,
602
+ "3": 0.636
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.872,
606
+ "1": 0.964,
607
+ "2": 0.846,
608
+ "3": 0.8
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 0.9970000386238098,
614
+ "fr": 0.999000072479248,
615
+ "de": 0.9970000386238098,
616
+ "es": 0.9980000257492065,
617
+ "nl": 0.9960000514984131
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.992,
650
+ "de": 0.879,
651
+ "es": 0.947,
652
+ "nl": 0.754
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.995,
657
+ "de": 0.878,
658
+ "es": 0.961,
659
+ "nl": 0.829
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.999,
663
+ "fr": 0.996,
664
+ "de": 0.935,
665
+ "es": 0.999,
666
+ "nl": 0.882
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_3_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "4b01c28a-49e7-4d94-8072-acc6aea11ea6",
30
+ "datetime_epoch_millis": 1737017887600,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9573437873274088,
44
+ "sae_top_1_test_accuracy": 0.7549375,
45
+ "sae_top_2_test_accuracy": 0.8241875000000002,
46
+ "sae_top_5_test_accuracy": 0.88148125,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.966200053691864,
65
+ "sae_top_1_test_accuracy": 0.7994,
66
+ "sae_top_2_test_accuracy": 0.8146000000000001,
67
+ "sae_top_5_test_accuracy": 0.8962,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9522000312805176,
84
+ "sae_top_1_test_accuracy": 0.7524000000000001,
85
+ "sae_top_2_test_accuracy": 0.7929999999999999,
86
+ "sae_top_5_test_accuracy": 0.8390000000000001,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9294000387191772,
103
+ "sae_top_1_test_accuracy": 0.8131999999999999,
104
+ "sae_top_2_test_accuracy": 0.834,
105
+ "sae_top_5_test_accuracy": 0.8512000000000001,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.915600037574768,
122
+ "sae_top_1_test_accuracy": 0.718,
123
+ "sae_top_2_test_accuracy": 0.7824,
124
+ "sae_top_5_test_accuracy": 0.825,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9745000302791595,
141
+ "sae_top_1_test_accuracy": 0.613,
142
+ "sae_top_2_test_accuracy": 0.82,
143
+ "sae_top_5_test_accuracy": 0.922,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9688000440597534,
160
+ "sae_top_1_test_accuracy": 0.6674,
161
+ "sae_top_2_test_accuracy": 0.7838,
162
+ "sae_top_5_test_accuracy": 0.8754,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9522500485181808,
179
+ "sae_top_1_test_accuracy": 0.7965,
180
+ "sae_top_2_test_accuracy": 0.8325,
181
+ "sae_top_5_test_accuracy": 0.88225,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9998000144958497,
198
+ "sae_top_1_test_accuracy": 0.8795999999999999,
199
+ "sae_top_2_test_accuracy": 0.9332,
200
+ "sae_top_5_test_accuracy": 0.9608000000000001,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_3",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9500000476837158,
240
+ "1": 0.9610000252723694,
241
+ "2": 0.9390000700950623,
242
+ "6": 0.9950000643730164,
243
+ "9": 0.9860000610351562
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.588,
275
+ "1": 0.601,
276
+ "2": 0.884,
277
+ "6": 0.979,
278
+ "9": 0.945
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.601,
282
+ "1": 0.656,
283
+ "2": 0.885,
284
+ "6": 0.978,
285
+ "9": 0.953
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.858,
289
+ "1": 0.804,
290
+ "2": 0.884,
291
+ "6": 0.977,
292
+ "9": 0.958
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9610000252723694,
298
+ "13": 0.956000030040741,
299
+ "14": 0.9550000429153442,
300
+ "18": 0.9280000329017639,
301
+ "19": 0.9610000252723694
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.727,
333
+ "13": 0.684,
334
+ "14": 0.862,
335
+ "18": 0.643,
336
+ "19": 0.846
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.857,
340
+ "13": 0.68,
341
+ "14": 0.856,
342
+ "18": 0.725,
343
+ "19": 0.847
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.945,
347
+ "13": 0.785,
348
+ "14": 0.862,
349
+ "18": 0.75,
350
+ "19": 0.853
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9610000252723694,
356
+ "21": 0.9260000586509705,
357
+ "22": 0.9170000553131104,
358
+ "25": 0.9520000219345093,
359
+ "26": 0.8910000324249268
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.876,
391
+ "21": 0.797,
392
+ "22": 0.815,
393
+ "25": 0.883,
394
+ "26": 0.695
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.905,
398
+ "21": 0.836,
399
+ "22": 0.833,
400
+ "25": 0.879,
401
+ "26": 0.717
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.915,
405
+ "21": 0.866,
406
+ "22": 0.866,
407
+ "25": 0.87,
408
+ "26": 0.739
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.956000030040741,
414
+ "2": 0.9320000410079956,
415
+ "3": 0.9050000309944153,
416
+ "5": 0.9110000729560852,
417
+ "6": 0.8740000128746033
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.853,
449
+ "2": 0.64,
450
+ "3": 0.585,
451
+ "5": 0.786,
452
+ "6": 0.726
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.88,
456
+ "2": 0.872,
457
+ "3": 0.616,
458
+ "5": 0.795,
459
+ "6": 0.749
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.924,
463
+ "2": 0.868,
464
+ "3": 0.787,
465
+ "5": 0.788,
466
+ "6": 0.758
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9740000367164612,
472
+ "5.0": 0.9750000238418579
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.613,
492
+ "5.0": 0.613
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.82,
496
+ "5.0": 0.82
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.922,
500
+ "5.0": 0.922
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9550000429153442,
506
+ "Python": 0.9800000190734863,
507
+ "HTML": 0.987000048160553,
508
+ "Java": 0.9670000672340393,
509
+ "PHP": 0.9550000429153442
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.549,
541
+ "Python": 0.598,
542
+ "HTML": 0.91,
543
+ "Java": 0.636,
544
+ "PHP": 0.644
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.657,
548
+ "Python": 0.625,
549
+ "HTML": 0.908,
550
+ "Java": 0.842,
551
+ "PHP": 0.887
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.692,
555
+ "Python": 0.952,
556
+ "HTML": 0.933,
557
+ "Java": 0.884,
558
+ "PHP": 0.916
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.9360000491142273,
564
+ "1": 0.9860000610351562,
565
+ "2": 0.9380000233650208,
566
+ "3": 0.9490000605583191
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.858,
594
+ "1": 0.948,
595
+ "2": 0.773,
596
+ "3": 0.607
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.866,
600
+ "1": 0.957,
601
+ "2": 0.848,
602
+ "3": 0.659
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.859,
606
+ "1": 0.957,
607
+ "2": 0.858,
608
+ "3": 0.855
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 0.999000072479248,
616
+ "es": 1.0,
617
+ "nl": 1.0
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 1.0,
649
+ "fr": 0.99,
650
+ "de": 0.893,
651
+ "es": 0.907,
652
+ "nl": 0.608
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 1.0,
656
+ "fr": 0.994,
657
+ "de": 0.896,
658
+ "es": 0.924,
659
+ "nl": 0.852
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 0.998,
663
+ "fr": 0.994,
664
+ "de": 0.944,
665
+ "es": 1.0,
666
+ "nl": 0.868
667
+ }
668
+ }
669
+ }
670
+ }
sparse_probing/saebench_gemma-2-2b_width-2pow12_date-0108/saebench_gemma-2-2b_width-2pow12_date-0108_TopK_gemma-2-2b__0108_resid_post_layer_12_trainer_4_eval_results.json ADDED
@@ -0,0 +1,670 @@
1
+ {
2
+ "eval_type_id": "sparse_probing",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "LabHC/bias_in_bios_class_set2",
8
+ "LabHC/bias_in_bios_class_set3",
9
+ "canrager/amazon_reviews_mcauley_1and5",
10
+ "canrager/amazon_reviews_mcauley_1and5_sentiment",
11
+ "codeparrot/github-code",
12
+ "fancyzhx/ag_news",
13
+ "Helsinki-NLP/europarl"
14
+ ],
15
+ "probe_train_set_size": 4000,
16
+ "probe_test_set_size": 1000,
17
+ "context_length": 128,
18
+ "sae_batch_size": 125,
19
+ "llm_batch_size": 32,
20
+ "llm_dtype": "bfloat16",
21
+ "model_name": "gemma-2-2b",
22
+ "k_values": [
23
+ 1,
24
+ 2,
25
+ 5
26
+ ],
27
+ "lower_vram_usage": false
28
+ },
29
+ "eval_id": "474e5176-e4dd-4936-98d6-06185a70b097",
30
+ "datetime_epoch_millis": 1737018186529,
31
+ "eval_result_metrics": {
32
+ "llm": {
33
+ "llm_test_accuracy": 0.9595250379294157,
34
+ "llm_top_1_test_accuracy": 0.6508312500000001,
35
+ "llm_top_2_test_accuracy": 0.7238125,
36
+ "llm_top_5_test_accuracy": 0.7825500000000001,
37
+ "llm_top_10_test_accuracy": null,
38
+ "llm_top_20_test_accuracy": null,
39
+ "llm_top_50_test_accuracy": null,
40
+ "llm_top_100_test_accuracy": null
41
+ },
42
+ "sae": {
43
+ "sae_test_accuracy": 0.9578500423580409,
44
+ "sae_top_1_test_accuracy": 0.7383687500000001,
45
+ "sae_top_2_test_accuracy": 0.7917375,
46
+ "sae_top_5_test_accuracy": 0.8625437500000002,
47
+ "sae_top_10_test_accuracy": null,
48
+ "sae_top_20_test_accuracy": null,
49
+ "sae_top_50_test_accuracy": null,
50
+ "sae_top_100_test_accuracy": null
51
+ }
52
+ },
53
+ "eval_result_details": [
54
+ {
55
+ "dataset_name": "LabHC/bias_in_bios_class_set1_results",
56
+ "llm_test_accuracy": 0.9694000363349915,
57
+ "llm_top_1_test_accuracy": 0.6436000000000001,
58
+ "llm_top_2_test_accuracy": 0.6874,
59
+ "llm_top_5_test_accuracy": 0.7908,
60
+ "llm_top_10_test_accuracy": null,
61
+ "llm_top_20_test_accuracy": null,
62
+ "llm_top_50_test_accuracy": null,
63
+ "llm_top_100_test_accuracy": null,
64
+ "sae_test_accuracy": 0.967400050163269,
65
+ "sae_top_1_test_accuracy": 0.7686,
66
+ "sae_top_2_test_accuracy": 0.8573999999999999,
67
+ "sae_top_5_test_accuracy": 0.8926000000000001,
68
+ "sae_top_10_test_accuracy": null,
69
+ "sae_top_20_test_accuracy": null,
70
+ "sae_top_50_test_accuracy": null,
71
+ "sae_top_100_test_accuracy": null
72
+ },
73
+ {
74
+ "dataset_name": "LabHC/bias_in_bios_class_set2_results",
75
+ "llm_test_accuracy": 0.9560000419616699,
76
+ "llm_top_1_test_accuracy": 0.6704,
77
+ "llm_top_2_test_accuracy": 0.7288,
78
+ "llm_top_5_test_accuracy": 0.7596,
79
+ "llm_top_10_test_accuracy": null,
80
+ "llm_top_20_test_accuracy": null,
81
+ "llm_top_50_test_accuracy": null,
82
+ "llm_top_100_test_accuracy": null,
83
+ "sae_test_accuracy": 0.9498000502586365,
84
+ "sae_top_1_test_accuracy": 0.7308,
85
+ "sae_top_2_test_accuracy": 0.7827999999999999,
86
+ "sae_top_5_test_accuracy": 0.8001999999999999,
87
+ "sae_top_10_test_accuracy": null,
88
+ "sae_top_20_test_accuracy": null,
89
+ "sae_top_50_test_accuracy": null,
90
+ "sae_top_100_test_accuracy": null
91
+ },
92
+ {
93
+ "dataset_name": "LabHC/bias_in_bios_class_set3_results",
94
+ "llm_test_accuracy": 0.9276000380516052,
95
+ "llm_top_1_test_accuracy": 0.681,
96
+ "llm_top_2_test_accuracy": 0.7408,
97
+ "llm_top_5_test_accuracy": 0.7662000000000001,
98
+ "llm_top_10_test_accuracy": null,
99
+ "llm_top_20_test_accuracy": null,
100
+ "llm_top_50_test_accuracy": null,
101
+ "llm_top_100_test_accuracy": null,
102
+ "sae_test_accuracy": 0.9280000329017639,
103
+ "sae_top_1_test_accuracy": 0.7548,
104
+ "sae_top_2_test_accuracy": 0.7584,
105
+ "sae_top_5_test_accuracy": 0.8280000000000001,
106
+ "sae_top_10_test_accuracy": null,
107
+ "sae_top_20_test_accuracy": null,
108
+ "sae_top_50_test_accuracy": null,
109
+ "sae_top_100_test_accuracy": null
110
+ },
111
+ {
112
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
113
+ "llm_test_accuracy": 0.9204000473022461,
114
+ "llm_top_1_test_accuracy": 0.6142,
115
+ "llm_top_2_test_accuracy": 0.6544000000000001,
116
+ "llm_top_5_test_accuracy": 0.6809999999999999,
117
+ "llm_top_10_test_accuracy": null,
118
+ "llm_top_20_test_accuracy": null,
119
+ "llm_top_50_test_accuracy": null,
120
+ "llm_top_100_test_accuracy": null,
121
+ "sae_test_accuracy": 0.9178000450134277,
122
+ "sae_top_1_test_accuracy": 0.6814,
123
+ "sae_top_2_test_accuracy": 0.7545999999999999,
124
+ "sae_top_5_test_accuracy": 0.79,
125
+ "sae_top_10_test_accuracy": null,
126
+ "sae_top_20_test_accuracy": null,
127
+ "sae_top_50_test_accuracy": null,
128
+ "sae_top_100_test_accuracy": null
129
+ },
130
+ {
131
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
132
+ "llm_test_accuracy": 0.9810000360012054,
133
+ "llm_top_1_test_accuracy": 0.672,
134
+ "llm_top_2_test_accuracy": 0.724,
135
+ "llm_top_5_test_accuracy": 0.766,
136
+ "llm_top_10_test_accuracy": null,
137
+ "llm_top_20_test_accuracy": null,
138
+ "llm_top_50_test_accuracy": null,
139
+ "llm_top_100_test_accuracy": null,
140
+ "sae_test_accuracy": 0.9785000383853912,
141
+ "sae_top_1_test_accuracy": 0.676,
142
+ "sae_top_2_test_accuracy": 0.695,
143
+ "sae_top_5_test_accuracy": 0.916,
144
+ "sae_top_10_test_accuracy": null,
145
+ "sae_top_20_test_accuracy": null,
146
+ "sae_top_50_test_accuracy": null,
147
+ "sae_top_100_test_accuracy": null
148
+ },
149
+ {
150
+ "dataset_name": "codeparrot/github-code_results",
151
+ "llm_test_accuracy": 0.9718000411987304,
152
+ "llm_top_1_test_accuracy": 0.6504000000000001,
153
+ "llm_top_2_test_accuracy": 0.6944000000000001,
154
+ "llm_top_5_test_accuracy": 0.7652,
155
+ "llm_top_10_test_accuracy": null,
156
+ "llm_top_20_test_accuracy": null,
157
+ "llm_top_50_test_accuracy": null,
158
+ "llm_top_100_test_accuracy": null,
159
+ "sae_test_accuracy": 0.9664000511169434,
160
+ "sae_top_1_test_accuracy": 0.6374,
161
+ "sae_top_2_test_accuracy": 0.7056,
162
+ "sae_top_5_test_accuracy": 0.8251999999999999,
163
+ "sae_top_10_test_accuracy": null,
164
+ "sae_top_20_test_accuracy": null,
165
+ "sae_top_50_test_accuracy": null,
166
+ "sae_top_100_test_accuracy": null
167
+ },
168
+ {
169
+ "dataset_name": "fancyzhx/ag_news_results",
170
+ "llm_test_accuracy": 0.950000062584877,
171
+ "llm_top_1_test_accuracy": 0.63225,
172
+ "llm_top_2_test_accuracy": 0.7775,
173
+ "llm_top_5_test_accuracy": 0.825,
174
+ "llm_top_10_test_accuracy": null,
175
+ "llm_top_20_test_accuracy": null,
176
+ "llm_top_50_test_accuracy": null,
177
+ "llm_top_100_test_accuracy": null,
178
+ "sae_test_accuracy": 0.9555000513792038,
179
+ "sae_top_1_test_accuracy": 0.7777499999999999,
180
+ "sae_top_2_test_accuracy": 0.8455,
181
+ "sae_top_5_test_accuracy": 0.89275,
182
+ "sae_top_10_test_accuracy": null,
183
+ "sae_top_20_test_accuracy": null,
184
+ "sae_top_50_test_accuracy": null,
185
+ "sae_top_100_test_accuracy": null
186
+ },
187
+ {
188
+ "dataset_name": "Helsinki-NLP/europarl_results",
189
+ "llm_test_accuracy": 1.0,
190
+ "llm_top_1_test_accuracy": 0.6428,
191
+ "llm_top_2_test_accuracy": 0.7831999999999999,
192
+ "llm_top_5_test_accuracy": 0.9065999999999999,
193
+ "llm_top_10_test_accuracy": null,
194
+ "llm_top_20_test_accuracy": null,
195
+ "llm_top_50_test_accuracy": null,
196
+ "llm_top_100_test_accuracy": null,
197
+ "sae_test_accuracy": 0.9994000196456909,
198
+ "sae_top_1_test_accuracy": 0.8802000000000001,
199
+ "sae_top_2_test_accuracy": 0.9346,
200
+ "sae_top_5_test_accuracy": 0.9556000000000001,
201
+ "sae_top_10_test_accuracy": null,
202
+ "sae_top_20_test_accuracy": null,
203
+ "sae_top_50_test_accuracy": null,
204
+ "sae_top_100_test_accuracy": null
205
+ }
206
+ ],
207
+ "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
208
+ "sae_lens_id": "custom_sae",
209
+ "sae_lens_release_id": "saebench_gemma-2-2b_width-2pow12_date-0108_TopKTrainer_google_gemma-2-2b_ctx1024_0108_resid_post_layer_12_trainer_4",
210
+ "sae_lens_version": "5.3.1",
211
+ "sae_cfg_dict": {
212
+ "model_name": "gemma-2-2b",
213
+ "d_in": 2304,
214
+ "d_sae": 4096,
215
+ "hook_layer": 12,
216
+ "hook_name": "blocks.12.hook_resid_post",
217
+ "context_size": null,
218
+ "hook_head_index": null,
219
+ "architecture": "topk",
220
+ "apply_b_dec_to_input": null,
221
+ "finetuning_scaling_factor": null,
222
+ "activation_fn_str": "",
223
+ "prepend_bos": true,
224
+ "normalize_activations": "none",
225
+ "dtype": "bfloat16",
226
+ "device": "",
227
+ "dataset_path": "",
228
+ "dataset_trust_remote_code": true,
229
+ "seqpos_slice": [
230
+ null
231
+ ],
232
+ "training_tokens": 499998720,
233
+ "sae_lens_training_version": null,
234
+ "neuronpedia_id": null
235
+ },
236
+ "eval_result_unstructured": {
237
+ "LabHC/bias_in_bios_class_set1_results": {
238
+ "sae_test_accuracy": {
239
+ "0": 0.9480000734329224,
240
+ "1": 0.9640000462532043,
241
+ "2": 0.9540000557899475,
242
+ "6": 0.9890000224113464,
243
+ "9": 0.9820000529289246
244
+ },
245
+ "llm_test_accuracy": {
246
+ "0": 0.9510000348091125,
247
+ "1": 0.9670000672340393,
248
+ "2": 0.9520000219345093,
249
+ "6": 0.9930000305175781,
250
+ "9": 0.984000027179718
251
+ },
252
+ "llm_top_1_test_accuracy": {
253
+ "0": 0.568,
254
+ "1": 0.629,
255
+ "2": 0.679,
256
+ "6": 0.791,
257
+ "9": 0.551
258
+ },
259
+ "llm_top_2_test_accuracy": {
260
+ "0": 0.585,
261
+ "1": 0.666,
262
+ "2": 0.673,
263
+ "6": 0.801,
264
+ "9": 0.712
265
+ },
266
+ "llm_top_5_test_accuracy": {
267
+ "0": 0.72,
268
+ "1": 0.707,
269
+ "2": 0.764,
270
+ "6": 0.899,
271
+ "9": 0.864
272
+ },
273
+ "sae_top_1_test_accuracy": {
274
+ "0": 0.559,
275
+ "1": 0.623,
276
+ "2": 0.873,
277
+ "6": 0.838,
278
+ "9": 0.95
279
+ },
280
+ "sae_top_2_test_accuracy": {
281
+ "0": 0.842,
282
+ "1": 0.647,
283
+ "2": 0.872,
284
+ "6": 0.98,
285
+ "9": 0.946
286
+ },
287
+ "sae_top_5_test_accuracy": {
288
+ "0": 0.88,
289
+ "1": 0.76,
290
+ "2": 0.887,
291
+ "6": 0.976,
292
+ "9": 0.96
293
+ }
294
+ },
295
+ "LabHC/bias_in_bios_class_set2_results": {
296
+ "sae_test_accuracy": {
297
+ "11": 0.9630000591278076,
298
+ "13": 0.9530000686645508,
299
+ "14": 0.9540000557899475,
300
+ "18": 0.9220000505447388,
301
+ "19": 0.9570000171661377
302
+ },
303
+ "llm_test_accuracy": {
304
+ "11": 0.9650000333786011,
305
+ "13": 0.9540000557899475,
306
+ "14": 0.9630000591278076,
307
+ "18": 0.9380000233650208,
308
+ "19": 0.9600000381469727
309
+ },
310
+ "llm_top_1_test_accuracy": {
311
+ "11": 0.552,
312
+ "13": 0.673,
313
+ "14": 0.64,
314
+ "18": 0.696,
315
+ "19": 0.791
316
+ },
317
+ "llm_top_2_test_accuracy": {
318
+ "11": 0.759,
319
+ "13": 0.722,
320
+ "14": 0.672,
321
+ "18": 0.722,
322
+ "19": 0.769
323
+ },
324
+ "llm_top_5_test_accuracy": {
325
+ "11": 0.784,
326
+ "13": 0.747,
327
+ "14": 0.729,
328
+ "18": 0.713,
329
+ "19": 0.825
330
+ },
331
+ "sae_top_1_test_accuracy": {
332
+ "11": 0.726,
333
+ "13": 0.696,
334
+ "14": 0.672,
335
+ "18": 0.702,
336
+ "19": 0.858
337
+ },
338
+ "sae_top_2_test_accuracy": {
339
+ "11": 0.844,
340
+ "13": 0.696,
341
+ "14": 0.829,
342
+ "18": 0.703,
343
+ "19": 0.842
344
+ },
345
+ "sae_top_5_test_accuracy": {
346
+ "11": 0.834,
347
+ "13": 0.74,
348
+ "14": 0.849,
349
+ "18": 0.727,
350
+ "19": 0.851
351
+ }
352
+ },
353
+ "LabHC/bias_in_bios_class_set3_results": {
354
+ "sae_test_accuracy": {
355
+ "20": 0.9570000171661377,
356
+ "21": 0.9270000457763672,
357
+ "22": 0.9170000553131104,
358
+ "25": 0.956000030040741,
359
+ "26": 0.8830000162124634
360
+ },
361
+ "llm_test_accuracy": {
362
+ "20": 0.9540000557899475,
363
+ "21": 0.9280000329017639,
364
+ "22": 0.9100000262260437,
365
+ "25": 0.9550000429153442,
366
+ "26": 0.8910000324249268
367
+ },
368
+ "llm_top_1_test_accuracy": {
369
+ "20": 0.705,
370
+ "21": 0.748,
371
+ "22": 0.627,
372
+ "25": 0.684,
373
+ "26": 0.641
374
+ },
375
+ "llm_top_2_test_accuracy": {
376
+ "20": 0.811,
377
+ "21": 0.763,
378
+ "22": 0.688,
379
+ "25": 0.768,
380
+ "26": 0.674
381
+ },
382
+ "llm_top_5_test_accuracy": {
383
+ "20": 0.859,
384
+ "21": 0.782,
385
+ "22": 0.724,
386
+ "25": 0.791,
387
+ "26": 0.675
388
+ },
389
+ "sae_top_1_test_accuracy": {
390
+ "20": 0.843,
391
+ "21": 0.679,
392
+ "22": 0.681,
393
+ "25": 0.888,
394
+ "26": 0.683
395
+ },
396
+ "sae_top_2_test_accuracy": {
397
+ "20": 0.895,
398
+ "21": 0.681,
399
+ "22": 0.663,
400
+ "25": 0.879,
401
+ "26": 0.674
402
+ },
403
+ "sae_top_5_test_accuracy": {
404
+ "20": 0.905,
405
+ "21": 0.829,
406
+ "22": 0.789,
407
+ "25": 0.875,
408
+ "26": 0.742
409
+ }
410
+ },
411
+ "canrager/amazon_reviews_mcauley_1and5_results": {
412
+ "sae_test_accuracy": {
413
+ "1": 0.9490000605583191,
414
+ "2": 0.9350000619888306,
415
+ "3": 0.9100000262260437,
416
+ "5": 0.9270000457763672,
417
+ "6": 0.8680000305175781
418
+ },
419
+ "llm_test_accuracy": {
420
+ "1": 0.956000030040741,
421
+ "2": 0.9270000457763672,
422
+ "3": 0.9250000715255737,
423
+ "5": 0.9250000715255737,
424
+ "6": 0.8690000176429749
425
+ },
426
+ "llm_top_1_test_accuracy": {
427
+ "1": 0.724,
428
+ "2": 0.597,
429
+ "3": 0.592,
430
+ "5": 0.577,
431
+ "6": 0.581
432
+ },
433
+ "llm_top_2_test_accuracy": {
434
+ "1": 0.752,
435
+ "2": 0.653,
436
+ "3": 0.602,
437
+ "5": 0.635,
438
+ "6": 0.63
439
+ },
440
+ "llm_top_5_test_accuracy": {
441
+ "1": 0.775,
442
+ "2": 0.648,
443
+ "3": 0.651,
444
+ "5": 0.651,
445
+ "6": 0.68
446
+ },
447
+ "sae_top_1_test_accuracy": {
448
+ "1": 0.858,
449
+ "2": 0.605,
450
+ "3": 0.599,
451
+ "5": 0.569,
452
+ "6": 0.776
453
+ },
454
+ "sae_top_2_test_accuracy": {
455
+ "1": 0.866,
456
+ "2": 0.676,
457
+ "3": 0.655,
458
+ "5": 0.8,
459
+ "6": 0.776
460
+ },
461
+ "sae_top_5_test_accuracy": {
462
+ "1": 0.91,
463
+ "2": 0.752,
464
+ "3": 0.701,
465
+ "5": 0.815,
466
+ "6": 0.772
467
+ }
468
+ },
469
+ "canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
470
+ "sae_test_accuracy": {
471
+ "1.0": 0.9780000448226929,
472
+ "5.0": 0.9790000319480896
473
+ },
474
+ "llm_test_accuracy": {
475
+ "1.0": 0.9800000190734863,
476
+ "5.0": 0.9820000529289246
477
+ },
478
+ "llm_top_1_test_accuracy": {
479
+ "1.0": 0.672,
480
+ "5.0": 0.672
481
+ },
482
+ "llm_top_2_test_accuracy": {
483
+ "1.0": 0.724,
484
+ "5.0": 0.724
485
+ },
486
+ "llm_top_5_test_accuracy": {
487
+ "1.0": 0.766,
488
+ "5.0": 0.766
489
+ },
490
+ "sae_top_1_test_accuracy": {
491
+ "1.0": 0.676,
492
+ "5.0": 0.676
493
+ },
494
+ "sae_top_2_test_accuracy": {
495
+ "1.0": 0.695,
496
+ "5.0": 0.695
497
+ },
498
+ "sae_top_5_test_accuracy": {
499
+ "1.0": 0.916,
500
+ "5.0": 0.916
501
+ }
502
+ },
503
+ "codeparrot/github-code_results": {
504
+ "sae_test_accuracy": {
505
+ "C": 0.9590000510215759,
506
+ "Python": 0.9820000529289246,
507
+ "HTML": 0.9810000658035278,
508
+ "Java": 0.9540000557899475,
509
+ "PHP": 0.956000030040741
510
+ },
511
+ "llm_test_accuracy": {
512
+ "C": 0.9550000429153442,
513
+ "Python": 0.9890000224113464,
514
+ "HTML": 0.9920000433921814,
515
+ "Java": 0.9650000333786011,
516
+ "PHP": 0.9580000638961792
517
+ },
518
+ "llm_top_1_test_accuracy": {
519
+ "C": 0.664,
520
+ "Python": 0.633,
521
+ "HTML": 0.725,
522
+ "Java": 0.637,
523
+ "PHP": 0.593
524
+ },
525
+ "llm_top_2_test_accuracy": {
526
+ "C": 0.665,
527
+ "Python": 0.68,
528
+ "HTML": 0.799,
529
+ "Java": 0.679,
530
+ "PHP": 0.649
531
+ },
532
+ "llm_top_5_test_accuracy": {
533
+ "C": 0.759,
534
+ "Python": 0.734,
535
+ "HTML": 0.909,
536
+ "Java": 0.715,
537
+ "PHP": 0.709
538
+ },
539
+ "sae_top_1_test_accuracy": {
540
+ "C": 0.591,
541
+ "Python": 0.619,
542
+ "HTML": 0.73,
543
+ "Java": 0.625,
544
+ "PHP": 0.622
545
+ },
546
+ "sae_top_2_test_accuracy": {
547
+ "C": 0.598,
548
+ "Python": 0.635,
549
+ "HTML": 0.805,
550
+ "Java": 0.614,
551
+ "PHP": 0.876
552
+ },
553
+ "sae_top_5_test_accuracy": {
554
+ "C": 0.677,
555
+ "Python": 0.96,
556
+ "HTML": 0.92,
557
+ "Java": 0.69,
558
+ "PHP": 0.879
559
+ }
560
+ },
561
+ "fancyzhx/ag_news_results": {
562
+ "sae_test_accuracy": {
563
+ "0": 0.940000057220459,
564
+ "1": 0.9880000352859497,
565
+ "2": 0.9410000443458557,
566
+ "3": 0.9530000686645508
567
+ },
568
+ "llm_test_accuracy": {
569
+ "0": 0.9390000700950623,
570
+ "1": 0.9910000562667847,
571
+ "2": 0.921000063419342,
572
+ "3": 0.9490000605583191
573
+ },
574
+ "llm_top_1_test_accuracy": {
575
+ "0": 0.566,
576
+ "1": 0.674,
577
+ "2": 0.664,
578
+ "3": 0.625
579
+ },
580
+ "llm_top_2_test_accuracy": {
581
+ "0": 0.795,
582
+ "1": 0.806,
583
+ "2": 0.698,
584
+ "3": 0.811
585
+ },
586
+ "llm_top_5_test_accuracy": {
587
+ "0": 0.822,
588
+ "1": 0.879,
589
+ "2": 0.75,
590
+ "3": 0.849
591
+ },
592
+ "sae_top_1_test_accuracy": {
593
+ "0": 0.794,
594
+ "1": 0.804,
595
+ "2": 0.826,
596
+ "3": 0.687
597
+ },
598
+ "sae_top_2_test_accuracy": {
599
+ "0": 0.789,
600
+ "1": 0.962,
601
+ "2": 0.845,
602
+ "3": 0.786
603
+ },
604
+ "sae_top_5_test_accuracy": {
605
+ "0": 0.872,
606
+ "1": 0.981,
607
+ "2": 0.884,
608
+ "3": 0.834
609
+ }
610
+ },
611
+ "Helsinki-NLP/europarl_results": {
612
+ "sae_test_accuracy": {
613
+ "en": 1.0,
614
+ "fr": 1.0,
615
+ "de": 1.0,
616
+ "es": 0.9980000257492065,
617
+ "nl": 0.999000072479248
618
+ },
619
+ "llm_test_accuracy": {
620
+ "en": 1.0,
621
+ "fr": 1.0,
622
+ "de": 1.0,
623
+ "es": 1.0,
624
+ "nl": 1.0
625
+ },
626
+ "llm_top_1_test_accuracy": {
627
+ "en": 0.736,
628
+ "fr": 0.594,
629
+ "de": 0.751,
630
+ "es": 0.501,
631
+ "nl": 0.632
632
+ },
633
+ "llm_top_2_test_accuracy": {
634
+ "en": 0.838,
635
+ "fr": 0.608,
636
+ "de": 0.827,
637
+ "es": 0.907,
638
+ "nl": 0.736
639
+ },
640
+ "llm_top_5_test_accuracy": {
641
+ "en": 0.89,
642
+ "fr": 0.921,
643
+ "de": 0.901,
644
+ "es": 0.975,
645
+ "nl": 0.846
646
+ },
647
+ "sae_top_1_test_accuracy": {
648
+ "en": 0.999,
649
+ "fr": 0.992,
650
+ "de": 0.894,
651
+ "es": 0.917,
652
+ "nl": 0.599
653
+ },
654
+ "sae_top_2_test_accuracy": {
655
+ "en": 0.999,
656
+ "fr": 0.991,
657
+ "de": 0.882,
658
+ "es": 0.949,
659
+ "nl": 0.852
660
+ },
661
+ "sae_top_5_test_accuracy": {
662
+ "en": 1.0,
663
+ "fr": 0.997,
664
+ "de": 0.939,
665
+ "es": 0.995,
666
+ "nl": 0.847
667
+ }
668
+ }
669
+ }
670
+ }