adamkarvonen
committed on
Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
See raw diff
- autointerp/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json +63 -0
- core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json +0 -0
- core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json +0 -0
- core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_1_eval_results.json +0 -0
- core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_2_eval_results.json +0 -0
- core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_5_eval_results.json +0 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_10_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_7_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_12_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_13_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_15_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_16_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_17_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_30_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_31_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_32_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_33_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_34_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_1_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_2_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_3_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_4_eval_results.json +323 -0
- scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_5_eval_results.json +323 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json +670 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json +670 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_7_eval_results.json +670 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json +670 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json +670 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_12_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_13_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_15_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_16_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_17_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_30_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_31_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_32_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_33_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_34_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_35_eval_results.json +240 -0
- sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_PAnneal_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json +240 -0
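For readers who want to work with these files rather than read the raw diff below, here is a minimal sketch of loading one of the added eval-result JSONs and reading its headline score. The `REPO_ID` value is a placeholder (substitute the actual dataset repo this commit belongs to); the filename is taken from the listing above, and the JSON fields used match the file contents shown in this commit.

import json

from huggingface_hub import hf_hub_download  # pip install huggingface_hub

# Placeholder: substitute the actual dataset repo id for this commit.
REPO_ID = "your-org/your-saebench-results-repo"

# Any path from the file listing above works; this is the autointerp result added here.
FILENAME = (
    "autointerp/saebench_pythia-160m-deduped_width-2pow12_date-0108/"
    "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped_"
    "_0108_resid_post_layer_8_trainer_14_eval_results.json"
)

# Download the single eval-result file from the dataset repo and parse it.
local_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset")
with open(local_path) as f:
    result = json.load(f)

# Headline metric for autointerp evals, as seen in the diff below.
print(result["eval_type_id"], result["eval_result_metrics"]["autointerp"]["autointerp_score"])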
autointerp/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json
ADDED
@@ -0,0 +1,63 @@
+{
+  "eval_type_id": "autointerp",
+  "eval_config": {
+    "model_name": "pythia-160m-deduped",
+    "n_latents": 1000,
+    "override_latents": null,
+    "dead_latent_threshold": 15,
+    "random_seed": 42,
+    "dataset_name": "monology/pile-uncopyrighted",
+    "llm_context_size": 128,
+    "llm_batch_size": 256,
+    "llm_dtype": "float32",
+    "buffer": 10,
+    "no_overlap": true,
+    "act_threshold_frac": 0.01,
+    "total_tokens": 2000000,
+    "scoring": true,
+    "max_tokens_in_explanation": 30,
+    "use_demos_in_explanation": true,
+    "n_top_ex_for_generation": 10,
+    "n_iw_sampled_ex_for_generation": 5,
+    "n_top_ex_for_scoring": 2,
+    "n_random_ex_for_scoring": 10,
+    "n_iw_sampled_ex_for_scoring": 2
+  },
+  "eval_id": "4c85f48f-2aa5-4b87-8725-5a2760cf21ff",
+  "datetime_epoch_millis": 1736507716865,
+  "eval_result_metrics": {
+    "autointerp": {
+      "autointerp_score": 0.7858571428571438
+    }
+  },
+  "eval_result_details": [],
+  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
+  "sae_lens_id": "custom_sae",
+  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_14",
+  "sae_lens_version": "5.3.0",
+  "sae_cfg_dict": {
+    "model_name": "pythia-160m-deduped",
+    "d_in": 768,
+    "d_sae": 4096,
+    "hook_layer": 8,
+    "hook_name": "blocks.8.hook_resid_post",
+    "context_size": null,
+    "hook_head_index": null,
+    "architecture": "gated",
+    "apply_b_dec_to_input": null,
+    "finetuning_scaling_factor": null,
+    "activation_fn_str": "",
+    "prepend_bos": true,
+    "normalize_activations": "none",
+    "dtype": "float32",
+    "device": "",
+    "dataset_path": "",
+    "dataset_trust_remote_code": true,
+    "seqpos_slice": [
+      null
+    ],
+    "training_tokens": 499998720,
+    "sae_lens_training_version": null,
+    "neuronpedia_id": null
+  }
+}
core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json
ADDED
The diff for this file is too large to render.
See raw diff
core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json
ADDED
The diff for this file is too large to render.
See raw diff
core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_1_eval_results.json
ADDED
The diff for this file is too large to render.
See raw diff
core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_2_eval_results.json
ADDED
The diff for this file is too large to render.
See raw diff
core_with_feature_statistics/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_5_eval_results.json
ADDED
The diff for this file is too large to render.
See raw diff
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_10_eval_results.json
ADDED
@@ -0,0 +1,323 @@
+{
+  "eval_type_id": "scr",
+  "eval_config": {
+    "random_seed": 42,
+    "dataset_names": [
+      "LabHC/bias_in_bios_class_set1",
+      "canrager/amazon_reviews_mcauley_1and5"
+    ],
+    "perform_scr": true,
+    "early_stopping_patience": 20,
+    "train_set_size": 4000,
+    "test_set_size": 1000,
+    "context_length": 128,
+    "probe_train_batch_size": 16,
+    "probe_test_batch_size": 500,
+    "probe_epochs": 20,
+    "probe_lr": 0.001,
+    "probe_l1_penalty": 0.001,
+    "sae_batch_size": 16,
+    "llm_batch_size": 256,
+    "llm_dtype": "float32",
+    "lower_vram_usage": true,
+    "model_name": "pythia-160m-deduped",
+    "n_values": [
+      2,
+      5,
+      10,
+      20,
+      50,
+      100,
+      500
+    ],
+    "column1_vals_lookup": {
+      "LabHC/bias_in_bios_class_set1": [
+        [
+          "professor",
+          "nurse"
+        ],
+        [
+          "architect",
+          "journalist"
+        ],
+        [
+          "surgeon",
+          "psychologist"
+        ],
+        [
+          "attorney",
+          "teacher"
+        ]
+      ],
+      "canrager/amazon_reviews_mcauley_1and5": [
+        [
+          "Books",
+          "CDs_and_Vinyl"
+        ],
+        [
+          "Software",
+          "Electronics"
+        ],
+        [
+          "Pet_Supplies",
+          "Office_Products"
+        ],
+        [
+          "Industrial_and_Scientific",
+          "Toys_and_Games"
+        ]
+      ]
+    }
+  },
+  "eval_id": "6821cdbf-7020-4ad3-886f-de6f8e5d6bbe",
+  "datetime_epoch_millis": 1737045014751,
+  "eval_result_metrics": {
+    "scr_metrics": {
+      "scr_dir1_threshold_2": 0.4082339591860339,
+      "scr_metric_threshold_2": 0.11149123507845864,
+      "scr_dir2_threshold_2": 0.11149123507845864,
+      "scr_dir1_threshold_5": 0.3416750626594651,
+      "scr_metric_threshold_5": 0.1892890545277563,
+      "scr_dir2_threshold_5": 0.1892890545277563,
+      "scr_dir1_threshold_10": -0.5817289977783154,
+      "scr_metric_threshold_10": 0.1336294815538908,
+      "scr_dir2_threshold_10": 0.1336294815538908,
+      "scr_dir1_threshold_20": -0.22324359143943145,
+      "scr_metric_threshold_20": -0.04439193420825799,
+      "scr_dir2_threshold_20": -0.04439193420825799,
+      "scr_dir1_threshold_50": -0.7886024633548797,
+      "scr_metric_threshold_50": -0.1417706213092358,
+      "scr_dir2_threshold_50": -0.1417706213092358,
+      "scr_dir1_threshold_100": -0.6922258889923828,
+      "scr_metric_threshold_100": 0.03102892688251857,
+      "scr_dir2_threshold_100": 0.03102892688251857,
+      "scr_dir1_threshold_500": -0.7603556224358132,
+      "scr_metric_threshold_500": -0.1948716003836253,
+      "scr_dir2_threshold_500": -0.1948716003836253
+    }
+  },
+  "eval_result_details": [
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+      "scr_dir1_threshold_2": 0.635294068148414,
+      "scr_metric_threshold_2": 0.025352120716357125,
+      "scr_dir2_threshold_2": 0.025352120716357125,
+      "scr_dir1_threshold_5": 0.03529434864073422,
+      "scr_metric_threshold_5": 0.08450695712092456,
+      "scr_dir2_threshold_5": 0.08450695712092456,
+      "scr_dir1_threshold_10": -0.44705847703889867,
+      "scr_metric_threshold_10": 0.13802806354425654,
+      "scr_dir2_threshold_10": 0.13802806354425654,
+      "scr_dir1_threshold_20": -0.29411723457011746,
+      "scr_metric_threshold_20": -0.005633897881634249,
+      "scr_dir2_threshold_20": -0.005633897881634249,
+      "scr_dir1_threshold_50": -1.2588228859293833,
+      "scr_metric_threshold_50": -0.08450712502132335,
+      "scr_dir2_threshold_50": -0.08450712502132335,
+      "scr_dir1_threshold_100": -0.7294111624723715,
+      "scr_metric_threshold_100": 0.09859144997441198,
+      "scr_dir2_threshold_100": 0.09859144997441198,
+      "scr_dir1_threshold_500": -1.741175711609016,
+      "scr_metric_threshold_500": -0.35492969002899977,
+      "scr_dir2_threshold_500": -0.35492969002899977
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+      "scr_dir1_threshold_2": 0.8114753617807134,
+      "scr_metric_threshold_2": 0.07028758191591404,
+      "scr_dir2_threshold_2": 0.07028758191591404,
+      "scr_dir1_threshold_5": 0.016393234383091822,
+      "scr_metric_threshold_5": 0.13418527978665729,
+      "scr_dir2_threshold_5": 0.13418527978665729,
+      "scr_dir1_threshold_10": -2.147541552261557,
+      "scr_metric_threshold_10": 0.1629392819145239,
+      "scr_dir2_threshold_10": 0.1629392819145239,
+      "scr_dir1_threshold_20": -1.2459019356856,
+      "scr_metric_threshold_20": 0.025559155320361612,
+      "scr_dir2_threshold_20": 0.025559155320361612,
+      "scr_dir1_threshold_50": -0.5409835745204756,
+      "scr_metric_threshold_50": -0.12140570212647647,
+      "scr_dir2_threshold_50": -0.12140570212647647,
+      "scr_dir1_threshold_100": -0.9262299567133407,
+      "scr_metric_threshold_100": 0.4696485744683809,
+      "scr_dir2_threshold_100": 0.4696485744683809,
+      "scr_dir1_threshold_500": -0.04918019171202157,
+      "scr_metric_threshold_500": 0.5878594297873524,
+      "scr_dir2_threshold_500": 0.5878594297873524
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+      "scr_dir1_threshold_2": 0.5747126988020154,
+      "scr_metric_threshold_2": 0.13450293213017933,
+      "scr_dir2_threshold_2": 0.13450293213017933,
+      "scr_dir1_threshold_5": 0.5747126988020154,
+      "scr_metric_threshold_5": 0.15789479187871047,
+      "scr_dir2_threshold_5": 0.15789479187871047,
+      "scr_dir1_threshold_10": -2.7356315145469567,
+      "scr_metric_threshold_10": 0.18128647734465844,
+      "scr_dir2_threshold_10": 0.18128647734465844,
+      "scr_dir1_threshold_20": -2.7241374112952843,
+      "scr_metric_threshold_20": -0.03801172852071732,
+      "scr_dir2_threshold_20": -0.03801172852071732,
+      "scr_dir1_threshold_50": -3.689654416429504,
+      "scr_metric_threshold_50": -0.06432752716716907,
+      "scr_dir2_threshold_50": -0.06432752716716907,
+      "scr_dir1_threshold_100": -4.137930609241596,
+      "scr_metric_threshold_100": 0.0935672647115414,
+      "scr_dir2_threshold_100": 0.0935672647115414,
+      "scr_dir1_threshold_500": -3.425286616087223,
+      "scr_metric_threshold_500": -0.046783719497062295,
+      "scr_dir2_threshold_500": -0.046783719497062295
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+      "scr_dir1_threshold_2": 0.4312501257285738,
+      "scr_metric_threshold_2": 0.11070121008413712,
+      "scr_dir2_threshold_2": 0.11070121008413712,
+      "scr_dir1_threshold_5": 0.8437501164153461,
+      "scr_metric_threshold_5": 0.2324723872163666,
+      "scr_dir2_threshold_5": 0.2324723872163666,
+      "scr_dir1_threshold_10": -0.7500001862645538,
+      "scr_metric_threshold_10": -0.16605170515454756,
+      "scr_dir2_threshold_10": -0.16605170515454756,
+      "scr_dir1_threshold_20": 0.5187501350418015,
+      "scr_metric_threshold_20": 0.3394833883411567,
+      "scr_dir2_threshold_20": 0.3394833883411567,
+      "scr_dir1_threshold_50": 0.2875000838190492,
+      "scr_metric_threshold_50": 0.3874539053795056,
+      "scr_dir2_threshold_50": 0.3874539053795056,
+      "scr_dir1_threshold_100": -0.437500419095246,
+      "scr_metric_threshold_100": 0.2546125412558675,
+      "scr_dir2_threshold_100": 0.2546125412558675,
+      "scr_dir1_threshold_500": -1.6375004936010675,
+      "scr_metric_threshold_500": -0.7195570957452845,
+      "scr_dir2_threshold_500": -0.7195570957452845
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+      "scr_dir1_threshold_2": -0.013422641596987927,
+      "scr_metric_threshold_2": 0.38388635231764584,
+      "scr_dir2_threshold_2": 0.38388635231764584,
+      "scr_dir1_threshold_5": 0.28187907381612815,
+      "scr_metric_threshold_5": 0.23222757785114265,
+      "scr_dir2_threshold_5": 0.23222757785114265,
+      "scr_dir1_threshold_10": 0.4765101771897499,
+      "scr_metric_threshold_10": 0.559241783811437,
+      "scr_dir2_threshold_10": 0.559241783811437,
+      "scr_dir1_threshold_20": 0.5167785020117561,
+      "scr_metric_threshold_20": 0.3459715880794115,
+      "scr_dir2_threshold_20": 0.3459715880794115,
+      "scr_dir1_threshold_50": 0.16107369931906715,
+      "scr_metric_threshold_50": -0.6540284119205886,
+      "scr_dir2_threshold_50": -0.6540284119205886,
+      "scr_dir1_threshold_100": -0.18120806174559145,
+      "scr_metric_threshold_100": 0.0,
+      "scr_dir2_threshold_100": 0.0,
+      "scr_dir1_threshold_500": -0.06711400804702444,
+      "scr_metric_threshold_500": 0.7488153225161744,
+      "scr_dir2_threshold_500": 0.7488153225161744
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+      "scr_dir1_threshold_2": 0.3043477697498678,
+      "scr_metric_threshold_2": 0.08962260702828691,
+      "scr_dir2_threshold_2": 0.08962260702828691,
+      "scr_dir1_threshold_5": 0.3985507434167107,
+      "scr_metric_threshold_5": 0.24999985942301584,
+      "scr_dir2_threshold_5": 0.24999985942301584,
+      "scr_dir1_threshold_10": 0.384057745666138,
+      "scr_metric_threshold_10": -0.12735853035197664,
+      "scr_dir2_threshold_10": -0.12735853035197664,
+      "scr_dir1_threshold_20": 0.5072462829164464,
+      "scr_metric_threshold_20": -0.38207559105593,
+      "scr_dir2_threshold_20": -0.38207559105593,
+      "scr_dir1_threshold_50": -1.2608696403335096,
+      "scr_metric_threshold_50": 0.7358490990421084,
+      "scr_dir2_threshold_50": 0.7358490990421084,
+      "scr_dir1_threshold_100": 0.21014479608302494,
+      "scr_metric_threshold_100": 0.6226413290710392,
+      "scr_dir2_threshold_100": 0.6226413290710392,
+      "scr_dir1_threshold_500": 0.23913035966649043,
+      "scr_metric_threshold_500": -0.0518869648585655,
+      "scr_dir2_threshold_500": -0.0518869648585655
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+      "scr_dir1_threshold_2": 0.34645683334217237,
+      "scr_metric_threshold_2": 0.11646579423663808,
+      "scr_dir2_threshold_2": 0.11646579423663808,
+      "scr_dir1_threshold_5": 0.34645683334217237,
+      "scr_metric_threshold_5": 0.20080325034404325,
+      "scr_dir2_threshold_5": 0.20080325034404325,
+      "scr_dir1_threshold_10": 0.37795295176671406,
+      "scr_metric_threshold_10": 0.2931727311397389,
+      "scr_dir2_threshold_10": 0.2931727311397389,
+      "scr_dir1_threshold_20": 0.5354330745616361,
+      "scr_metric_threshold_20": 0.0040160123441452325,
+      "scr_dir2_threshold_20": 0.0040160123441452325,
+      "scr_dir1_threshold_50": 0.6535431666578276,
+      "scr_metric_threshold_50": -0.5220883072688698,
+      "scr_dir2_threshold_50": -0.5220883072688698,
+      "scr_dir1_threshold_100": 0.8582677017534556,
+      "scr_metric_threshold_100": -0.09638549313984086,
+      "scr_dir2_threshold_100": -0.09638549313984086,
+      "scr_dir1_threshold_500": 0.8346454956031026,
+      "scr_metric_threshold_500": -0.6224900521289269,
+      "scr_dir2_threshold_500": -0.6224900521289269
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+      "scr_dir1_threshold_2": 0.17575745753350233,
+      "scr_metric_threshold_2": -0.038888717801489385,
+      "scr_dir2_threshold_2": -0.038888717801489385,
+      "scr_dir1_threshold_5": 0.23636345245952212,
+      "scr_metric_threshold_5": 0.22222233260118965,
+      "scr_dir2_threshold_5": 0.22222233260118965,
+      "scr_dir1_threshold_10": 0.18787887326284092,
+      "scr_metric_threshold_10": 0.027777750183035924,
+      "scr_dir2_threshold_10": 0.027777750183035924,
+      "scr_dir1_threshold_20": 0.39999985550391026,
+      "scr_metric_threshold_20": -0.6444444002928574,
+      "scr_dir2_threshold_20": -0.6444444002928574,
+      "scr_dir1_threshold_50": -0.6606061394221096,
+      "scr_metric_threshold_50": -0.811110901391073,
+      "scr_dir2_threshold_50": -0.811110901391073,
+      "scr_dir1_threshold_100": -0.19393940050739802,
+      "scr_metric_threshold_100": -1.1944442512812514,
+      "scr_dir2_threshold_100": -1.1944442512812514,
+      "scr_dir1_threshold_500": -0.23636381369974652,
+      "scr_metric_threshold_500": -1.1000000331136903,
+      "scr_dir2_threshold_500": -1.1000000331136903
+    }
+  ],
+  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
+  "sae_lens_id": "custom_sae",
+  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_10",
+  "sae_lens_version": "5.3.1",
+  "sae_cfg_dict": {
+    "model_name": "pythia-160m-deduped",
+    "d_in": 768,
+    "d_sae": 4096,
+    "hook_layer": 8,
+    "hook_name": "blocks.8.hook_resid_post",
+    "context_size": null,
+    "hook_head_index": null,
+    "architecture": "batch_topk",
+    "apply_b_dec_to_input": null,
+    "finetuning_scaling_factor": null,
+    "activation_fn_str": "",
+    "prepend_bos": true,
+    "normalize_activations": "none",
+    "dtype": "float32",
+    "device": "",
+    "dataset_path": "",
+    "dataset_trust_remote_code": true,
+    "seqpos_slice": [
+      null
+    ],
+    "training_tokens": 499998720,
+    "sae_lens_training_version": null,
+    "neuronpedia_id": null
+  },
+  "eval_result_unstructured": null
+}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json
ADDED
@@ -0,0 +1,323 @@
+{
+  "eval_type_id": "scr",
+  "eval_config": {
+    "random_seed": 42,
+    "dataset_names": [
+      "LabHC/bias_in_bios_class_set1",
+      "canrager/amazon_reviews_mcauley_1and5"
+    ],
+    "perform_scr": true,
+    "early_stopping_patience": 20,
+    "train_set_size": 4000,
+    "test_set_size": 1000,
+    "context_length": 128,
+    "probe_train_batch_size": 16,
+    "probe_test_batch_size": 500,
+    "probe_epochs": 20,
+    "probe_lr": 0.001,
+    "probe_l1_penalty": 0.001,
+    "sae_batch_size": 16,
+    "llm_batch_size": 256,
+    "llm_dtype": "float32",
+    "lower_vram_usage": true,
+    "model_name": "pythia-160m-deduped",
+    "n_values": [
+      2,
+      5,
+      10,
+      20,
+      50,
+      100,
+      500
+    ],
+    "column1_vals_lookup": {
+      "LabHC/bias_in_bios_class_set1": [
+        [
+          "professor",
+          "nurse"
+        ],
+        [
+          "architect",
+          "journalist"
+        ],
+        [
+          "surgeon",
+          "psychologist"
+        ],
+        [
+          "attorney",
+          "teacher"
+        ]
+      ],
+      "canrager/amazon_reviews_mcauley_1and5": [
+        [
+          "Books",
+          "CDs_and_Vinyl"
+        ],
+        [
+          "Software",
+          "Electronics"
+        ],
+        [
+          "Pet_Supplies",
+          "Office_Products"
+        ],
+        [
+          "Industrial_and_Scientific",
+          "Toys_and_Games"
+        ]
+      ]
+    }
+  },
+  "eval_id": "a2c0b63b-f255-43e0-9603-442c7d1acd31",
+  "datetime_epoch_millis": 1737044868239,
+  "eval_result_metrics": {
+    "scr_metrics": {
+      "scr_dir1_threshold_2": 0.13976417353636858,
+      "scr_metric_threshold_2": 0.024924048438213962,
+      "scr_dir2_threshold_2": 0.024924048438213962,
+      "scr_dir1_threshold_5": 0.13155197306874242,
+      "scr_metric_threshold_5": -0.03343493380222885,
+      "scr_dir2_threshold_5": -0.03343493380222885,
+      "scr_dir1_threshold_10": -0.1170693348564642,
+      "scr_metric_threshold_10": 0.0005143816856057574,
+      "scr_dir2_threshold_10": 0.0005143816856057574,
+      "scr_dir1_threshold_20": -0.45551263859754576,
+      "scr_metric_threshold_20": -0.018684859538431416,
+      "scr_dir2_threshold_20": -0.018684859538431416,
+      "scr_dir1_threshold_50": -0.5093623265079062,
+      "scr_metric_threshold_50": -0.1627756493787575,
+      "scr_dir2_threshold_50": -0.1627756493787575,
+      "scr_dir1_threshold_100": -0.30203284175570927,
+      "scr_metric_threshold_100": -0.5106975097095607,
+      "scr_dir2_threshold_100": -0.5106975097095607,
+      "scr_dir1_threshold_500": -2.2494638035701557,
+      "scr_metric_threshold_500": -0.5275927513069945,
+      "scr_dir2_threshold_500": -0.5275927513069945
+    }
+  },
+  "eval_result_details": [
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+      "scr_dir1_threshold_2": 0.2823533866642731,
+      "scr_metric_threshold_2": 0.03943661356984455,
+      "scr_dir2_threshold_2": 0.03943661356984455,
+      "scr_dir1_threshold_5": 0.2823533866642731,
+      "scr_metric_threshold_5": 0.0676055992768194,
+      "scr_dir2_threshold_5": 0.0676055992768194,
+      "scr_dir1_threshold_10": 0.03529434864073422,
+      "scr_metric_threshold_10": 0.07605636214907138,
+      "scr_dir2_threshold_10": 0.07605636214907138,
+      "scr_dir1_threshold_20": -1.1764696395112701,
+      "scr_metric_threshold_20": 0.12676043568138684,
+      "scr_dir2_threshold_20": 0.12676043568138684,
+      "scr_dir1_threshold_50": -1.0705872948198678,
+      "scr_metric_threshold_50": 0.24507044429131927,
+      "scr_dir2_threshold_50": 0.24507044429131927,
+      "scr_dir1_threshold_100": 0.2470590380235389,
+      "scr_metric_threshold_100": 0.02816898570697485,
+      "scr_dir2_threshold_100": 0.02816898570697485,
+      "scr_dir1_threshold_500": -1.0470581965465786,
+      "scr_metric_threshold_500": -0.35492969002899977,
+      "scr_dir2_threshold_500": -0.35492969002899977
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+      "scr_dir1_threshold_2": 0.3442623191096433,
+      "scr_metric_threshold_2": 0.04472842659555243,
+      "scr_dir2_threshold_2": 0.04472842659555243,
+      "scr_dir1_threshold_5": 0.3606555534927351,
+      "scr_metric_threshold_5": -0.05111812021056244,
+      "scr_dir2_threshold_5": -0.05111812021056244,
+      "scr_dir1_threshold_10": -0.991803382808454,
+      "scr_metric_threshold_10": -0.07028739148575325,
+      "scr_dir2_threshold_10": -0.07028739148575325,
+      "scr_dir1_threshold_20": -0.7377053184940542,
+      "scr_metric_threshold_20": 0.047923463833218216,
+      "scr_dir2_threshold_20": 0.047923463833218216,
+      "scr_dir1_threshold_50": -2.122951212124173,
+      "scr_metric_threshold_50": -0.18530339999721973,
+      "scr_dir2_threshold_50": -0.18530339999721973,
+      "scr_dir1_threshold_100": -0.6229512121241731,
+      "scr_metric_threshold_100": -0.3706069904246002,
+      "scr_dir2_threshold_100": -0.3706069904246002,
+      "scr_dir1_threshold_500": -2.4590169140422704,
+      "scr_metric_threshold_500": 0.24600644149061873,
+      "scr_dir2_threshold_500": 0.24600644149061873
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+      "scr_dir1_threshold_2": 0.2758625887047159,
+      "scr_metric_threshold_2": 0.0,
+      "scr_dir2_threshold_2": 0.0,
+      "scr_dir1_threshold_5": 0.13793129435235796,
+      "scr_metric_threshold_5": 0.0,
+      "scr_dir2_threshold_5": 0.0,
+      "scr_dir1_threshold_10": 0.045977098117452646,
+      "scr_metric_threshold_10": 0.1871345294230828,
+      "scr_dir2_threshold_10": 0.1871345294230828,
+      "scr_dir1_threshold_20": 0.2873566919563886,
+      "scr_metric_threshold_20": 0.10818713348372759,
+      "scr_dir2_threshold_20": 0.10818713348372759,
+      "scr_dir1_threshold_50": 0.3563219965771866,
+      "scr_metric_threshold_50": 0.3362573303254483,
+      "scr_dir2_threshold_50": 0.3362573303254483,
+      "scr_dir1_threshold_100": 0.2988507952080613,
+      "scr_metric_threshold_100": -0.23099413573964134,
+      "scr_dir2_threshold_100": -0.23099413573964134,
+      "scr_dir1_threshold_500": -4.241378908728174,
+      "scr_metric_threshold_500": -0.37134511994824504,
+      "scr_dir2_threshold_500": -0.37134511994824504
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+      "scr_dir1_threshold_2": 0.24999981373544622,
+      "scr_metric_threshold_2": 0.011070186991408574,
+      "scr_dir2_threshold_2": 0.011070186991408574,
+      "scr_dir1_threshold_5": -0.06875024680053377,
+      "scr_metric_threshold_5": 0.04428052802231809,
+      "scr_dir2_threshold_5": 0.04428052802231809,
+      "scr_dir1_threshold_10": -0.5812500884756631,
+      "scr_metric_threshold_10": 0.1180811881161987,
+      "scr_dir2_threshold_10": 0.1180811881161987,
+      "scr_dir1_threshold_20": -1.5062506658957797,
+      "scr_metric_threshold_20": 0.1180811881161987,
+      "scr_dir2_threshold_20": 0.1180811881161987,
+      "scr_dir1_threshold_50": -1.7312504237518598,
+      "scr_metric_threshold_50": -0.48708470852891794,
+      "scr_dir2_threshold_50": -0.48708470852891794,
+      "scr_dir1_threshold_100": -1.7375007171185322,
+      "scr_metric_threshold_100": -0.44280440044991604,
+      "scr_dir2_threshold_100": -0.44280440044991604,
+      "scr_dir1_threshold_500": -1.7375007171185322,
+      "scr_metric_threshold_500": -0.7601474148082557,
+      "scr_dir2_threshold_500": -0.7601474148082557
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+      "scr_dir1_threshold_2": -0.0067113207984939634,
+      "scr_metric_threshold_2": 0.004739557394605017,
+      "scr_dir2_threshold_2": 0.004739557394605017,
+      "scr_dir1_threshold_5": 0.10067101207053668,
+      "scr_metric_threshold_5": -0.12796211743921526,
+      "scr_dir2_threshold_5": -0.12796211743921526,
+      "scr_dir1_threshold_10": 0.13422841612509132,
+      "scr_metric_threshold_10": -0.1516587744665032,
+      "scr_dir2_threshold_10": -0.1516587744665032,
+      "scr_dir1_threshold_20": 0.1744967409470975,
+      "scr_metric_threshold_20": -0.3270142059602943,
+      "scr_dir2_threshold_20": -0.3270142059602943,
+      "scr_dir1_threshold_50": -0.05369096641899412,
+      "scr_metric_threshold_50": -0.6540284119205886,
+      "scr_dir2_threshold_50": -0.6540284119205886,
+      "scr_dir1_threshold_100": -0.6040264724542624,
+      "scr_metric_threshold_100": -0.6540284119205886,
+      "scr_dir2_threshold_100": -0.6540284119205886,
+      "scr_dir1_threshold_500": -2.2550329905600677,
+      "scr_metric_threshold_500": -0.6540284119205886,
+      "scr_dir2_threshold_500": -0.6540284119205886
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+      "scr_dir1_threshold_2": -0.07246369299982378,
+      "scr_metric_threshold_2": 0.033018722042752274,
+      "scr_dir2_threshold_2": 0.033018722042752274,
+      "scr_dir1_threshold_5": 0.10144925658328928,
+      "scr_metric_threshold_5": -0.37264146964802336,
+      "scr_dir2_threshold_5": -0.37264146964802336,
+      "scr_dir1_threshold_10": 0.18840551541600584,
+      "scr_metric_threshold_10": -0.10849056869013178,
+      "scr_dir2_threshold_10": -0.10849056869013178,
+      "scr_dir1_threshold_20": 0.20289851316657856,
+      "scr_metric_threshold_20": -0.10849056869013178,
+      "scr_dir2_threshold_20": -0.10849056869013178,
+      "scr_dir1_threshold_50": 0.24637664258293684,
+      "scr_metric_threshold_50": 0.13679237060591493,
+      "scr_dir2_threshold_50": 0.13679237060591493,
+      "scr_dir1_threshold_100": -0.25362335741706316,
+      "scr_metric_threshold_100": -0.6981131757184187,
+      "scr_dir2_threshold_100": -0.6981131757184187,
+      "scr_dir1_threshold_500": -2.253623357417063,
+      "scr_metric_threshold_500": -0.7688681022388291,
+      "scr_dir2_threshold_500": -0.7688681022388291
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+      "scr_dir1_threshold_2": 0.06299223684908342,
+      "scr_metric_threshold_2": 0.044176614537739675,
+      "scr_dir2_threshold_2": 0.044176614537739675,
+      "scr_dir1_threshold_5": 0.12598447369816684,
+      "scr_metric_threshold_5": 0.14457835939779684,
+      "scr_dir2_threshold_5": 0.14457835939779684,
+      "scr_dir1_threshold_10": 0.2204723596440055,
+      "scr_metric_threshold_10": -0.12449805830099961,
+      "scr_dir2_threshold_10": -0.12449805830099961,
+      "scr_dir1_threshold_20": -0.09448788594583864,
+      "scr_metric_threshold_20": -0.12048180658078332,
+      "scr_dir2_threshold_20": -0.12048180658078332,
+      "scr_dir1_threshold_50": 0.5433069868358249,
+      "scr_metric_threshold_50": 0.24497986488178294,
+      "scr_dir2_threshold_50": 0.24497986488178294,
+      "scr_dir1_threshold_100": 0.7165354035069111,
+      "scr_metric_threshold_100": -0.5783131982151163,
+      "scr_dir2_threshold_100": -0.5783131982151163,
+      "scr_dir1_threshold_500": -2.5354326052338494,
+      "scr_metric_threshold_500": -0.3574296467742758,
+      "scr_dir2_threshold_500": -0.3574296467742758
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+      "scr_dir1_threshold_2": -0.018181942973895696,
+      "scr_metric_threshold_2": 0.02222226637380919,
+      "scr_dir2_threshold_2": 0.02222226637380919,
+      "scr_dir1_threshold_5": 0.012121054489114207,
+      "scr_metric_threshold_5": 0.027777750183035924,
+      "scr_dir2_threshold_5": 0.027777750183035924,
+      "scr_dir1_threshold_10": 0.012121054489114207,
+      "scr_metric_threshold_10": 0.07777776673988103,
+      "scr_dir2_threshold_10": 0.07777776673988103,
+      "scr_dir1_threshold_20": -0.7939395450034877,
+      "scr_metric_threshold_20": 0.0055554838092267324,
+      "scr_dir2_threshold_20": 0.0055554838092267324,
+      "scr_dir1_threshold_50": -0.24242434094430362,
+      "scr_metric_threshold_50": -0.9388886846877992,
+      "scr_dir2_threshold_50": -0.9388886846877992,
+      "scr_dir1_threshold_100": -0.4606062116701544,
+      "scr_metric_threshold_100": -1.1388887509151797,
+      "scr_dir2_threshold_100": -1.1388887509151797,
+      "scr_dir1_threshold_500": -1.4666667389147114,
+      "scr_metric_threshold_500": -1.2000000662273804,
+      "scr_dir2_threshold_500": -1.2000000662273804
+    }
+  ],
+  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
+  "sae_lens_id": "custom_sae",
+  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_11",
+  "sae_lens_version": "5.3.1",
+  "sae_cfg_dict": {
+    "model_name": "pythia-160m-deduped",
+    "d_in": 768,
+    "d_sae": 4096,
+    "hook_layer": 8,
+    "hook_name": "blocks.8.hook_resid_post",
+    "context_size": null,
+    "hook_head_index": null,
+    "architecture": "batch_topk",
+    "apply_b_dec_to_input": null,
+    "finetuning_scaling_factor": null,
+    "activation_fn_str": "",
+    "prepend_bos": true,
+    "normalize_activations": "none",
+    "dtype": "float32",
+    "device": "",
+    "dataset_path": "",
+    "dataset_trust_remote_code": true,
+    "seqpos_slice": [
+      null
+    ],
+    "training_tokens": 499998720,
+    "sae_lens_training_version": null,
+    "neuronpedia_id": null
+  },
+  "eval_result_unstructured": null
+}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json
ADDED
@@ -0,0 +1,323 @@
+{
+  "eval_type_id": "scr",
+  "eval_config": {
+    "random_seed": 42,
+    "dataset_names": [
+      "LabHC/bias_in_bios_class_set1",
+      "canrager/amazon_reviews_mcauley_1and5"
+    ],
+    "perform_scr": true,
+    "early_stopping_patience": 20,
+    "train_set_size": 4000,
+    "test_set_size": 1000,
+    "context_length": 128,
+    "probe_train_batch_size": 16,
+    "probe_test_batch_size": 500,
+    "probe_epochs": 20,
+    "probe_lr": 0.001,
+    "probe_l1_penalty": 0.001,
+    "sae_batch_size": 16,
+    "llm_batch_size": 256,
+    "llm_dtype": "float32",
+    "lower_vram_usage": true,
+    "model_name": "pythia-160m-deduped",
+    "n_values": [
+      2,
+      5,
+      10,
+      20,
+      50,
+      100,
+      500
+    ],
+    "column1_vals_lookup": {
+      "LabHC/bias_in_bios_class_set1": [
+        [
+          "professor",
+          "nurse"
+        ],
+        [
+          "architect",
+          "journalist"
+        ],
+        [
+          "surgeon",
+          "psychologist"
+        ],
+        [
+          "attorney",
+          "teacher"
+        ]
+      ],
+      "canrager/amazon_reviews_mcauley_1and5": [
+        [
+          "Books",
+          "CDs_and_Vinyl"
+        ],
+        [
+          "Software",
+          "Electronics"
+        ],
+        [
+          "Pet_Supplies",
+          "Office_Products"
+        ],
+        [
+          "Industrial_and_Scientific",
+          "Toys_and_Games"
+        ]
+      ]
+    }
+  },
+  "eval_id": "3a2f5299-1cfa-4b03-bf7f-7b476fc3d816",
+  "datetime_epoch_millis": 1737044574841,
+  "eval_result_metrics": {
+    "scr_metrics": {
+      "scr_dir1_threshold_2": 0.2368455257447833,
+      "scr_metric_threshold_2": 0.11715524244585024,
+      "scr_dir2_threshold_2": 0.11715524244585024,
+      "scr_dir1_threshold_5": 0.19723680851085257,
+      "scr_metric_threshold_5": 0.15183397223654438,
+      "scr_dir2_threshold_5": 0.15183397223654438,
+      "scr_dir1_threshold_10": 0.2194711464513396,
+      "scr_metric_threshold_10": 0.18384952956341885,
+      "scr_dir2_threshold_10": 0.18384952956341885,
+      "scr_dir1_threshold_20": 0.18680647436097636,
+      "scr_metric_threshold_20": 0.245690146366616,
+      "scr_dir2_threshold_20": 0.245690146366616,
+      "scr_dir1_threshold_50": 0.11437781313851093,
+      "scr_metric_threshold_50": 0.29116164034200437,
+      "scr_dir2_threshold_50": 0.29116164034200437,
+      "scr_dir1_threshold_100": 0.08370590063498194,
+      "scr_metric_threshold_100": 0.20240721120284083,
+      "scr_dir2_threshold_100": 0.20240721120284083,
+      "scr_dir1_threshold_500": -0.4588741064132325,
+      "scr_metric_threshold_500": 0.05494461825409841,
+      "scr_dir2_threshold_500": 0.05494461825409841
+    }
+  },
+  "eval_result_details": [
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+      "scr_dir1_threshold_2": 0.45882372740634364,
+      "scr_metric_threshold_2": 0.04225347856046228,
+      "scr_dir2_threshold_2": 0.04225347856046228,
+      "scr_dir1_threshold_5": 0.5411762725936564,
+      "scr_metric_threshold_5": 0.0676055992768194,
+      "scr_dir2_threshold_5": 0.0676055992768194,
+      "scr_dir1_threshold_10": 0.5529415229611013,
+      "scr_metric_threshold_10": 0.12676043568138684,
+      "scr_dir2_threshold_10": 0.12676043568138684,
+      "scr_dir1_threshold_20": 0.5529415229611013,
+      "scr_metric_threshold_20": 0.2253520535561976,
+      "scr_dir2_threshold_20": 0.2253520535561976,
+      "scr_dir1_threshold_50": 0.6235295190117695,
+      "scr_metric_threshold_50": 0.014084492853487425,
+      "scr_dir2_threshold_50": 0.014084492853487425,
+      "scr_dir1_threshold_100": 0.5764706212343906,
+      "scr_metric_threshold_100": 0.04788720854169773,
+      "scr_dir2_threshold_100": 0.04788720854169773,
+      "scr_dir1_threshold_500": -0.2117646893828047,
+      "scr_metric_threshold_500": 0.10422534785604623,
+      "scr_dir2_threshold_500": 0.10422534785604623
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+      "scr_dir1_threshold_2": 0.5819671490409514,
+      "scr_metric_threshold_2": 0.1789138968123705,
+      "scr_dir2_threshold_2": 0.1789138968123705,
+      "scr_dir1_threshold_5": 0.5983608719867892,
+      "scr_metric_threshold_5": 0.23642171063794296,
+      "scr_dir2_threshold_5": 0.23642171063794296,
+      "scr_dir1_threshold_10": 0.5901642547952434,
+      "scr_metric_threshold_10": 0.2939297148936762,
+      "scr_dir2_threshold_10": 0.2939297148936762,
+      "scr_dir1_threshold_20": 0.6557376808903568,
+      "scr_metric_threshold_20": 0.39297129893745686,
+      "scr_dir2_threshold_20": 0.39297129893745686,
+      "scr_dir1_threshold_50": 0.6721314038361946,
+      "scr_metric_threshold_50": 0.5942493138325231,
+      "scr_dir2_threshold_50": 0.5942493138325231,
+      "scr_dir1_threshold_100": 0.5901642547952434,
+      "scr_metric_threshold_100": -0.09584654680611486,
+      "scr_dir2_threshold_100": -0.09584654680611486,
+      "scr_dir1_threshold_500": -1.5573772974663136,
+      "scr_metric_threshold_500": -0.37699668403961023,
+      "scr_dir2_threshold_500": -0.37699668403961023
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+      "scr_dir1_threshold_2": 0.3563219965771866,
+      "scr_metric_threshold_2": 0.04385960631655852,
+      "scr_dir2_threshold_2": 0.04385960631655852,
+      "scr_dir1_threshold_5": 0.4482761928120919,
+      "scr_metric_threshold_5": 0.07894739593935524,
+      "scr_dir2_threshold_5": 0.07894739593935524,
+      "scr_dir1_threshold_10": 0.5172414974328899,
+      "scr_metric_threshold_10": 0.0994151425073826,
+      "scr_dir2_threshold_10": 0.0994151425073826,
+      "scr_dir1_threshold_20": 0.49425329092954456,
+      "scr_metric_threshold_20": 0.1520467398002861,
+      "scr_dir2_threshold_20": 0.1520467398002861,
+      "scr_dir1_threshold_50": 0.40229909469463926,
+      "scr_metric_threshold_50": 0.3040934796005722,
+      "scr_dir2_threshold_50": 0.3040934796005722,
+      "scr_dir1_threshold_100": 0.3563219965771866,
+      "scr_metric_threshold_100": 0.31871352265534153,
+      "scr_dir2_threshold_100": 0.31871352265534153,
+      "scr_dir1_threshold_500": -0.3103442133489721,
+      "scr_metric_threshold_500": -0.06432752716716907,
+      "scr_dir2_threshold_500": -0.06432752716716907
+    },
+    {
+      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+      "scr_dir1_threshold_2": 0.1374997485428524,
+      "scr_metric_threshold_2": 0.08487084708528918,
+      "scr_dir2_threshold_2": 0.08487084708528918,
+      "scr_dir1_threshold_5": 0.16875009778889075,
+      "scr_metric_threshold_5": 0.12177117713222949,
+      "scr_dir2_threshold_5": 0.12177117713222949,
+      "scr_dir1_threshold_10": 0.18124993946402002,
+      "scr_metric_threshold_10": 0.158671727122486,
+      "scr_dir2_threshold_10": 0.158671727122486,
+      "scr_dir1_threshold_20": -0.13750012107195997,
+      "scr_metric_threshold_20": 0.24354257420777517,
+      "scr_dir2_threshold_20": 0.24354257420777517,
+      "scr_dir1_threshold_50": 0.025000055879366136,
+      "scr_metric_threshold_50": 0.37269372937206624,
+      "scr_dir2_threshold_50": 0.37269372937206624,
+      "scr_dir1_threshold_100": -0.06250032596296912,
+      "scr_metric_threshold_100": 0.4981548955203265,
+      "scr_dir2_threshold_100": 0.4981548955203265,
+      "scr_dir1_threshold_500": 0.3687497997656047,
+      "scr_metric_threshold_500": 0.6125460946204944,
+      "scr_dir2_threshold_500": 0.6125460946204944
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+      "scr_dir1_threshold_2": 0.10067101207053668,
+      "scr_metric_threshold_2": 0.3554504203821872,
+      "scr_dir2_threshold_2": 0.3554504203821872,
+      "scr_dir1_threshold_5": -0.6711404805012869,
+      "scr_metric_threshold_5": 0.3554504203821872,
+      "scr_dir2_threshold_5": 0.3554504203821872,
+      "scr_dir1_threshold_10": -0.7852345341998539,
+      "scr_metric_threshold_10": 0.31279638123578213,
+      "scr_dir2_threshold_10": 0.31279638123578213,
+      "scr_dir1_threshold_20": -0.7449662093778477,
+      "scr_metric_threshold_20": 0.2464456850620891,
+      "scr_dir2_threshold_20": 0.2464456850620891,
+      "scr_dir1_threshold_50": -1.2953017154131161,
+      "scr_metric_threshold_50": 0.199052088521079,
+      "scr_dir2_threshold_50": 0.199052088521079,
+      "scr_dir1_threshold_100": -1.2953017154131161,
+      "scr_metric_threshold_100": 0.0,
+      "scr_dir2_threshold_100": 0.0,
+      "scr_dir1_threshold_500": -1.7651001717733297,
+      "scr_metric_threshold_500": -0.20853063833742044,
+      "scr_dir2_threshold_500": -0.20853063833742044
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+      "scr_dir1_threshold_2": 0.12318853725030839,
+      "scr_metric_threshold_2": 0.08490568690131776,
+      "scr_dir2_threshold_2": 0.08490568690131776,
+      "scr_dir1_threshold_5": 0.20289851316657856,
+      "scr_metric_threshold_5": 0.18867905431051207,
+      "scr_dir2_threshold_5": 0.18867905431051207,
+      "scr_dir1_threshold_10": 0.28260848908284875,
+      "scr_metric_threshold_10": 0.23584909904210843,
+      "scr_dir2_threshold_10": 0.23584909904210843,
+      "scr_dir1_threshold_20": 0.2681159232499559,
+      "scr_metric_threshold_20": 0.3113206645355196,
+      "scr_dir2_threshold_20": 0.3113206645355196,
+      "scr_dir1_threshold_50": 0.10869553949973566,
+      "scr_metric_threshold_50": 0.4009432715638065,
+      "scr_dir2_threshold_50": 0.4009432715638065,
+      "scr_dir1_threshold_100": 0.06521741008337739,
+      "scr_metric_threshold_100": 0.3820753099019616,
+      "scr_dir2_threshold_100": 0.3820753099019616,
+      "scr_dir1_threshold_500": -0.16666666666666666,
+      "scr_metric_threshold_500": 0.2924527028736747,
+      "scr_dir2_threshold_500": 0.2924527028736747
+    },
+    {
+      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+      "scr_dir1_threshold_2": 0.11811009209619154,
+      "scr_metric_threshold_2": 0.08032120438718889,
+      "scr_dir2_threshold_2": 0.08032120438718889,
+      "scr_dir1_threshold_5": 0.20472453509562788,
+      "scr_metric_threshold_5": 0.06024090329039166,
+      "scr_dir2_threshold_5": 0.06024090329039166,
+      "scr_dir1_threshold_10": 0.2834645964930889,
+      "scr_metric_threshold_10": 0.1767069369031008,
+      "scr_dir2_threshold_10": 0.1767069369031008,
+      "scr_dir1_threshold_20": 0.3149607149176306,
+      "scr_metric_threshold_20": 0.3493976220859853,
+      "scr_dir2_threshold_20": 0.3493976220859853,
+      "scr_dir1_threshold_50": 0.1968506228214391,
+      "scr_metric_threshold_50": 0.377509947871073,
+      "scr_dir2_threshold_50": 0.377509947871073,
+      "scr_dir1_threshold_100": 0.1968506228214391,
+      "scr_metric_threshold_100": 0.4016065006880865,
+      "scr_dir2_threshold_100": 0.4016065006880865,
+      "scr_dir1_threshold_500": 0.007873912274188802,
+      "scr_metric_threshold_500": 0.2690764176987964,
+      "scr_dir2_threshold_500": 0.2690764176987964
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": 0.018181942973895696,
|
271 |
+
"scr_metric_threshold_2": 0.06666679912142758,
|
272 |
+
"scr_dir2_threshold_2": 0.06666679912142758,
|
273 |
+
"scr_dir1_threshold_5": 0.08484846514447261,
|
274 |
+
"scr_metric_threshold_5": 0.10555551692291695,
|
275 |
+
"scr_dir2_threshold_5": 0.10555551692291695,
|
276 |
+
"scr_dir1_threshold_10": 0.1333334055813782,
|
277 |
+
"scr_metric_threshold_10": 0.06666679912142758,
|
278 |
+
"scr_dir2_threshold_10": 0.06666679912142758,
|
279 |
+
"scr_dir1_threshold_20": 0.09090899238902972,
|
280 |
+
"scr_metric_threshold_20": 0.04444453274761838,
|
281 |
+
"scr_dir2_threshold_20": 0.04444453274761838,
|
282 |
+
"scr_dir1_threshold_50": 0.18181798477805944,
|
283 |
+
"scr_metric_threshold_50": 0.06666679912142758,
|
284 |
+
"scr_dir2_threshold_50": 0.06666679912142758,
|
285 |
+
"scr_dir1_threshold_100": 0.24242434094430362,
|
286 |
+
"scr_metric_threshold_100": 0.06666679912142758,
|
287 |
+
"scr_dir2_threshold_100": 0.06666679912142758,
|
288 |
+
"scr_dir1_threshold_500": -0.036363524707567006,
|
289 |
+
"scr_metric_threshold_500": -0.18888876747202474,
|
290 |
+
"scr_dir2_threshold_500": -0.18888876747202474
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_6",
|
296 |
+
"sae_lens_version": "5.3.1",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "batch_topk",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
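Each of the added files follows the same schema: an eval_config block, aggregate scr_metrics keyed by threshold, a per-dataset eval_result_details list, and SAE metadata. As a minimal sketch of reading one of them (the local path is hypothetical and only the standard library is used; this is illustrative, not part of SAEBench itself):

```python
import json
from pathlib import Path

# Hypothetical local checkout path to one of the added result files.
path = Path(
    "scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/"
    "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_"
    "pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json"
)

with path.open() as f:
    result = json.load(f)

# Aggregate SCR metrics, keyed like "scr_dir1_threshold_20".
scr = result["eval_result_metrics"]["scr_metrics"]
for n in result["eval_config"]["n_values"]:
    print(n, scr[f"scr_dir1_threshold_{n}"], scr[f"scr_metric_threshold_{n}"])

# Per-dataset breakdown lives in eval_result_details.
for detail in result["eval_result_details"]:
    print(detail["dataset_name"], detail["scr_metric_threshold_20"])
```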
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_7_eval_results.json
ADDED
@@ -0,0 +1,323 @@
[Added JSON: SCR eval results for the trainer_7 BatchTopK SAE. eval_type_id "scr"; eval_config: random_seed 42; datasets LabHC/bias_in_bios_class_set1 and canrager/amazon_reviews_mcauley_1and5; perform_scr true; early_stopping_patience 20; train_set_size 4000; test_set_size 1000; context_length 128; probe_train_batch_size 16; probe_test_batch_size 500; probe_epochs 20; probe_lr 0.001; probe_l1_penalty 0.001; sae_batch_size 16; llm_batch_size 256; llm_dtype float32; lower_vram_usage true; model_name pythia-160m-deduped; n_values 2, 5, 10, 20, 50, 100, 500; column1_vals_lookup pairs professor/nurse, architect/journalist, surgeon/psychologist, attorney/teacher and Books/CDs_and_Vinyl, Software/Electronics, Pet_Supplies/Office_Products, Industrial_and_Scientific/Toys_and_Games. eval_id cfbe2419-d96a-45c0-8b2c-29db79a74aa0; datetime_epoch_millis 1737045160596.
Aggregate scr_metrics (scr_dir2 equals scr_metric at every threshold):
  n=2:   scr_dir1 0.33575863048084714,  scr_metric 0.12181007044421421
  n=5:   scr_dir1 0.3885379266950901,   scr_metric 0.1913604565509749
  n=10:  scr_dir1 0.3308613577670798,   scr_metric 0.23620956534644144
  n=20:  scr_dir1 0.3835861992997974,   scr_metric 0.3168358653314513
  n=50:  scr_dir1 0.416389770776615,    scr_metric 0.3449542160401062
  n=100: scr_dir1 0.28868572840067236,  scr_metric 0.31900623554950347
  n=500: scr_dir1 -0.1273483204514781,  scr_metric -0.042573089301926915
eval_result_details: per-dataset SCR values at the same thresholds for the four bias_in_bios pairs and the four amazon_reviews pairs listed above.
Footer: sae_bench_commit_hash 141aff72928f7588c1451bed47c401e1d565d471; sae_lens_id custom_sae; sae_lens_release_id saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_7; sae_lens_version 5.3.1; sae_cfg_dict: batch_topk architecture, d_in 768, d_sae 4096, hook blocks.8.hook_resid_post (layer 8), dtype float32, prepend_bos true, normalize_activations none, training_tokens 499998720; eval_result_unstructured null.]
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json
ADDED
@@ -0,0 +1,323 @@
[Added JSON: SCR eval results for the trainer_8 BatchTopK SAE. eval_type_id "scr"; eval_config identical to the trainer_7 file above. eval_id c6a5c973-4962-4595-922c-c045cd6bb9df; datetime_epoch_millis 1737044719005.
Aggregate scr_metrics (scr_dir2 equals scr_metric at every threshold):
  n=2:   scr_dir1 0.3622960391354135,   scr_metric 0.12141758585616391
  n=5:   scr_dir1 0.4397710993857653,   scr_metric 0.18718188597830604
  n=10:  scr_dir1 0.4162606719061542,   scr_metric 0.24846962065560566
  n=20:  scr_dir1 0.394481146287565,    scr_metric 0.30987621061533305
  n=50:  scr_dir1 0.4386385564685014,   scr_metric 0.36000353359436194
  n=100: scr_dir1 0.04099916376390976,  scr_metric 0.41111722443788556
  n=500: scr_dir1 -0.714998186454576,   scr_metric 0.01782407921393761
eval_result_details: per-dataset SCR values at the same thresholds for the four bias_in_bios pairs and the four amazon_reviews pairs.
Footer: identical to the trainer_7 file except sae_lens_release_id saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_8.]
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json
ADDED
@@ -0,0 +1,323 @@
[Added JSON: SCR eval results for the trainer_9 BatchTopK SAE. eval_type_id "scr"; eval_config identical to the trainer_7 file above. eval_id 1d7f4b77-cc01-449e-b588-46d25760a5e1; datetime_epoch_millis 1737044429375.
Aggregate scr_metrics (scr_dir2 equals scr_metric at every threshold):
  n=2:   scr_dir1 0.3924356238013567,   scr_metric 0.09312123015678658
  n=5:   scr_dir1 0.09127440675677788,  scr_metric 0.16779011897950058
  n=10:  scr_dir1 -0.0719223214825767,  scr_metric 0.15835462580901422
  n=20:  scr_dir1 -0.36474667663034044, scr_metric 0.21446733289857983
  n=50:  scr_dir1 -0.39899350534424927, scr_metric 0.1478027229420425
  n=100: scr_dir1 -0.5410074913099223,  scr_metric 0.2188830729582443
  n=500: scr_dir1 -1.40447279806404,    scr_metric -0.09401004862732026
eval_result_details: per-dataset SCR values at the same thresholds for the four bias_in_bios pairs and the four amazon_reviews pairs.
Footer: identical to the trainer_7 file except sae_lens_release_id saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_9.]
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_12_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000, "context_length": 128,
    "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20, "probe_lr": 0.001, "probe_l1_penalty": 0.001,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "d8903a61-9981-467d-a5cc-8deee324b96f",
  "datetime_epoch_millis": 1736483377769,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.40170999542424246, "scr_metric_threshold_2": 0.07226518453444469, "scr_dir2_threshold_2": 0.07226518453444469,
      "scr_dir1_threshold_5": 0.4458180266715228, "scr_metric_threshold_5": 0.09990830557523789, "scr_dir2_threshold_5": 0.09990830557523789,
      "scr_dir1_threshold_10": 0.5109758754990873, "scr_metric_threshold_10": 0.17718009615565208, "scr_dir2_threshold_10": 0.17718009615565208,
      "scr_dir1_threshold_20": 0.5504574374069586, "scr_metric_threshold_20": 0.2097968386270702, "scr_dir2_threshold_20": 0.2097968386270702,
      "scr_dir1_threshold_50": 0.3591579137019672, "scr_metric_threshold_50": 0.2599182800589988, "scr_dir2_threshold_50": 0.2599182800589988,
      "scr_dir1_threshold_100": 0.06421485795998529, "scr_metric_threshold_100": 0.4103238061974014, "scr_dir2_threshold_100": 0.4103238061974014,
      "scr_dir1_threshold_500": -0.9193706029130219, "scr_metric_threshold_500": -0.17863545335821718, "scr_dir2_threshold_500": -0.17863545335821718
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5764706212343906, "scr_metric_threshold_2": 0.016901357844105153, "scr_dir2_threshold_2": 0.016901357844105153,
      "scr_dir1_threshold_5": 0.5882351703710352, "scr_metric_threshold_5": 0.05915483640456743, "scr_dir2_threshold_5": 0.05915483640456743,
      "scr_dir1_threshold_10": 0.6470586172850588, "scr_metric_threshold_10": 0.12676043568138684, "scr_dir2_threshold_10": 0.12676043568138684,
      "scr_dir1_threshold_20": 0.682352965925793, "scr_metric_threshold_20": 0.1577464542793782, "scr_dir2_threshold_20": 0.1577464542793782,
      "scr_dir1_threshold_50": 0.7647055111131057, "scr_metric_threshold_50": 0.23380281642844958, "scr_dir2_threshold_50": 0.23380281642844958,
      "scr_dir1_threshold_100": 0.5999997195076798, "scr_metric_threshold_100": 0.12112670570015138, "scr_dir2_threshold_100": 0.12112670570015138,
      "scr_dir1_threshold_500": -2.7529395595148607, "scr_metric_threshold_500": 0.3154929085587564, "scr_dir2_threshold_500": 0.3154929085587564
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.7704917872602378, "scr_metric_threshold_2": 0.08945685319110486, "scr_dir2_threshold_2": 0.08945685319110486,
      "scr_dir1_threshold_5": 0.7868855102060758, "scr_metric_threshold_5": 0.13738031702432307, "scr_dir2_threshold_5": 0.13738031702432307,
      "scr_dir1_threshold_10": 0.7295082127397622, "scr_metric_threshold_10": 0.2044728617025713, "scr_dir2_threshold_10": 0.2044728617025713,
      "scr_dir1_threshold_20": 0.7704917872602378, "scr_metric_threshold_20": 0.2683707500034754, "scr_dir2_threshold_20": 0.2683707500034754,
      "scr_dir1_threshold_50": 0.622950723561427, "scr_metric_threshold_50": 0.297124752131342, "scr_dir2_threshold_50": 0.297124752131342,
      "scr_dir1_threshold_100": 0.28688502164332963, "scr_metric_threshold_100": 0.6261981627678948, "scr_dir2_threshold_100": 0.6261981627678948,
      "scr_dir1_threshold_500": -0.0573772974663136, "scr_metric_threshold_500": -0.5271563882939533, "scr_dir2_threshold_500": -0.5271563882939533
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5632185955503426, "scr_metric_threshold_2": 0.12280700225591376, "scr_dir2_threshold_2": 0.12280700225591376,
      "scr_dir1_threshold_5": 0.586206802053688, "scr_metric_threshold_5": 0.096491203609462, "scr_dir2_threshold_5": 0.096491203609462,
      "scr_dir1_threshold_10": 0.5977015904161226, "scr_metric_threshold_10": 0.18421059052516223, "scr_dir2_threshold_10": 0.18421059052516223,
      "scr_dir1_threshold_20": 0.620689796919468, "scr_metric_threshold_20": 0.192982407218924, "scr_dir2_threshold_20": 0.192982407218924,
      "scr_dir1_threshold_50": 0.724138096406046, "scr_metric_threshold_50": 0.3099415316789966, "scr_dir2_threshold_50": 0.3099415316789966,
      "scr_dir1_threshold_100": 0.4482761928120919, "scr_metric_threshold_100": 0.4473684027070965, "scr_dir2_threshold_100": 0.4473684027070965,
      "scr_dir1_threshold_500": 0.0804600929832326, "scr_metric_threshold_500": 0.020467746568027374, "scr_dir2_threshold_500": 0.020467746568027374
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.5437501909211676, "scr_metric_threshold_2": 0.08487084708528918, "scr_dir2_threshold_2": 0.08487084708528918,
      "scr_dir1_threshold_5": 0.5374998975544955, "scr_metric_threshold_5": 0.10701122106810633, "scr_dir2_threshold_5": 0.10701122106810633,
      "scr_dir1_threshold_10": 0.6249999068677231, "scr_metric_threshold_10": 0.11070121008413712, "scr_dir2_threshold_10": 0.11070121008413712,
      "scr_dir1_threshold_20": 0.5062499208375646, "scr_metric_threshold_20": -0.011069967048092365, "scr_dir2_threshold_20": -0.011069967048092365,
      "scr_dir1_threshold_50": -0.10000022351746454, "scr_metric_threshold_50": 0.21402222219289643, "scr_dir2_threshold_50": 0.21402222219289643,
      "scr_dir1_threshold_100": -0.437500419095246, "scr_metric_threshold_100": 0.47970473049685636, "scr_dir2_threshold_100": 0.47970473049685636,
      "scr_dir1_threshold_500": -1.1000002235174646, "scr_metric_threshold_500": -0.32472321233371737, "scr_dir2_threshold_500": -0.32472321233371737
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.0, "scr_metric_threshold_2": -0.014217824724512168, "scr_dir2_threshold_2": -0.014217824724512168,
      "scr_dir1_threshold_5": 0.20805374497060972, "scr_metric_threshold_5": 0.056872146357351556, "scr_dir2_threshold_5": 0.056872146357351556,
      "scr_dir1_threshold_10": 0.2483220697926159, "scr_metric_threshold_10": 0.3080568238411771, "scr_dir2_threshold_10": 0.3080568238411771,
      "scr_dir1_threshold_20": 0.3087247570411464, "scr_metric_threshold_20": 0.2606635097866013, "scr_dir2_threshold_20": 0.2606635097866013,
      "scr_dir1_threshold_50": -0.18120806174559145, "scr_metric_threshold_50": 0.09952618550375665, "scr_dir2_threshold_50": 0.09952618550375665,
      "scr_dir1_threshold_100": -1.5973147516247261, "scr_metric_threshold_100": 0.11374429271470311, "scr_dir2_threshold_100": 0.11374429271470311,
      "scr_dir1_threshold_500": -2.1476502576599943, "scr_metric_threshold_500": -0.31753537365751855, "scr_dir2_threshold_500": -0.31753537365751855
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.3550721820826725, "scr_metric_threshold_2": 0.10849056869013178, "scr_dir2_threshold_2": 0.10849056869013178,
      "scr_dir1_threshold_5": 0.3695651798332452, "scr_metric_threshold_5": 0.1981131757184187, "scr_dir2_threshold_5": 0.1981131757184187,
      "scr_dir1_threshold_10": 0.5362318464999118, "scr_metric_threshold_10": 0.23584909904210843, "scr_dir2_threshold_10": 0.23584909904210843,
      "scr_dir1_threshold_20": 0.5724636929998238, "scr_metric_threshold_20": 0.476415118211186, "scr_dir2_threshold_20": 0.476415118211186,
      "scr_dir1_threshold_50": -0.014492997750572723, "scr_metric_threshold_50": 0.5943395271552561, "scr_dir2_threshold_50": 0.5943395271552561,
      "scr_dir1_threshold_100": 0.3695651798332452, "scr_metric_threshold_100": 0.7405660191690776, "scr_dir2_threshold_100": 0.7405660191690776,
      "scr_dir1_threshold_500": -1.0652174100833773, "scr_metric_threshold_500": 0.033018722042752274, "scr_dir2_threshold_500": 0.033018722042752274
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.2834645964930889, "scr_metric_threshold_2": 0.06425715501060796, "scr_dir2_threshold_2": 0.06425715501060796,
      "scr_dir1_threshold_5": 0.33858292106798354, "scr_metric_threshold_5": 0.11646579423663808, "scr_dir2_threshold_5": 0.11646579423663808,
      "scr_dir1_threshold_10": 0.42519689473963335, "scr_metric_threshold_10": 0.18072294924724605, "scr_dir2_threshold_10": 0.18072294924724605,
      "scr_dir1_threshold_20": 0.5669291929861778, "scr_metric_threshold_20": 0.2610441536344349, "scr_dir2_threshold_20": 0.2610441536344349,
      "scr_dir1_threshold_50": 0.6692914605339918, "scr_metric_threshold_50": 0.36947792318278255, "scr_dir2_threshold_50": 0.36947792318278255,
      "scr_dir1_threshold_100": 0.7165354035069111, "scr_metric_threshold_100": 0.5261043196130151, "scr_dir2_threshold_100": 0.5261043196130151,
      "scr_dir1_threshold_500": 0.1968506228214391, "scr_metric_threshold_500": 0.06024090329039166, "scr_dir2_threshold_500": 0.06024090329039166
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.12121198985203961, "scr_metric_threshold_2": 0.10555551692291695, "scr_dir2_threshold_2": 0.10555551692291695,
      "scr_dir1_threshold_5": 0.15151498731504953, "scr_metric_threshold_5": 0.027777750183035924, "scr_dir2_threshold_5": 0.027777750183035924,
      "scr_dir1_threshold_10": 0.27878786565187064, "scr_metric_threshold_10": 0.06666679912142758, "scr_dir2_threshold_10": 0.06666679912142758,
      "scr_dir1_threshold_20": 0.37575738528545743, "scr_metric_threshold_20": 0.0722222829306543, "scr_dir2_threshold_20": 0.0722222829306543,
      "scr_dir1_threshold_50": 0.38787880101479605, "scr_metric_threshold_50": -0.038888717801489385, "scr_dir2_threshold_50": -0.038888717801489385,
      "scr_dir1_threshold_100": 0.12727251709659673, "scr_metric_threshold_100": 0.22777781641041636, "scr_dir2_threshold_100": 0.22777781641041636,
      "scr_dir1_threshold_500": -0.5090907908668356, "scr_metric_threshold_500": -0.6888889330404758, "scr_dir2_threshold_500": -0.6888889330404758
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_12",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post",
    "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "",
    "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720,
    "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_13_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000, "context_length": 128,
    "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20, "probe_lr": 0.001, "probe_l1_penalty": 0.001,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "4b05f9c9-d21c-4f31-9126-f3cbe40fef64",
  "datetime_epoch_millis": 1736483524800,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.3565870710076725, "scr_metric_threshold_2": 0.08989815526881219, "scr_dir2_threshold_2": 0.08989815526881219,
      "scr_dir1_threshold_5": 0.40208666839051405, "scr_metric_threshold_5": 0.18024336067234514, "scr_dir2_threshold_5": 0.18024336067234514,
      "scr_dir1_threshold_10": 0.46266938198205093, "scr_metric_threshold_10": 0.22876153353421425, "scr_dir2_threshold_10": 0.22876153353421425,
      "scr_dir1_threshold_20": 0.47062370802986553, "scr_metric_threshold_20": 0.30025250733423564, "scr_dir2_threshold_20": 0.30025250733423564,
      "scr_dir1_threshold_50": 0.4936315142666215, "scr_metric_threshold_50": 0.49947406950164996, "scr_dir2_threshold_50": 0.49947406950164996,
      "scr_dir1_threshold_100": 0.16912859039510408, "scr_metric_threshold_100": 0.4600275731265039, "scr_dir2_threshold_100": 0.4600275731265039,
      "scr_dir1_threshold_500": -0.6967877997098797, "scr_metric_threshold_500": -0.03796300668344031, "scr_dir2_threshold_500": -0.03796300668344031
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5764706212343906, "scr_metric_threshold_2": 0.014084492853487425, "scr_dir2_threshold_2": 0.014084492853487425,
      "scr_dir1_threshold_5": 0.5529415229611013, "scr_metric_threshold_5": 0.04788720854169773, "scr_dir2_threshold_5": 0.04788720854169773,
      "scr_dir1_threshold_10": 0.682352965925793, "scr_metric_threshold_10": 0.09859144997441198, "scr_dir2_threshold_10": 0.09859144997441198,
      "scr_dir1_threshold_20": 0.635294068148414, "scr_metric_threshold_20": 0.17464781212348335, "scr_dir2_threshold_20": 0.17464781212348335,
      "scr_dir1_threshold_50": 0.5882351703710352, "scr_metric_threshold_50": 0.2760562949889119, "scr_dir2_threshold_50": 0.2760562949889119,
      "scr_dir1_threshold_100": 0.1529412424687812, "scr_metric_threshold_100": 0.14929569140712623, "scr_dir2_threshold_100": 0.14929569140712623,
      "scr_dir1_threshold_500": 0.1529412424687812, "scr_metric_threshold_500": 0.2028167978304582, "scr_dir2_threshold_500": 0.2028167978304582
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.7459019356856001, "scr_metric_threshold_2": 0.09265189042877064, "scr_dir2_threshold_2": 0.09265189042877064,
      "scr_dir1_threshold_5": 0.7704917872602378, "scr_metric_threshold_5": 0.2012780148950663, "scr_dir2_threshold_5": 0.2012780148950663,
      "scr_dir1_threshold_10": 0.8196724675350056, "scr_metric_threshold_10": 0.28434517447116114, "scr_dir2_threshold_10": 0.28434517447116114,
      "scr_dir1_threshold_20": 0.893442999384411, "scr_metric_threshold_20": 0.39616614574496184, "scr_dir2_threshold_20": 0.39616614574496184,
      "scr_dir1_threshold_50": 0.8196724675350056, "scr_metric_threshold_50": 0.5942493138325231, "scr_dir2_threshold_50": 0.5942493138325231,
      "scr_dir1_threshold_100": 0.8360657019180974, "scr_metric_threshold_100": 0.6996805914913138, "scr_dir2_threshold_100": 0.6996805914913138,
      "scr_dir1_threshold_500": -0.8360657019180974, "scr_metric_threshold_500": -0.5207666946789433, "scr_dir2_threshold_500": -0.5207666946789433
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.49425329092954456, "scr_metric_threshold_2": 0.07894739593935524, "scr_dir2_threshold_2": 0.07894739593935524,
      "scr_dir1_threshold_5": 0.47126439931543734, "scr_metric_threshold_5": 0.11403501127956878, "scr_dir2_threshold_5": 0.11403501127956878,
      "scr_dir1_threshold_10": 0.4827591876778719, "scr_metric_threshold_10": 0.19005846832100343, "scr_dir2_threshold_10": 0.19005846832100343,
      "scr_dir1_threshold_20": 0.586206802053688, "scr_metric_threshold_20": 0.289473610828386, "scr_dir2_threshold_20": 0.289473610828386,
      "scr_dir1_threshold_50": 0.5977015904161226, "scr_metric_threshold_50": 0.36549706786982067, "scr_dir2_threshold_50": 0.36549706786982067,
      "scr_dir1_threshold_100": 0.2988507952080613, "scr_metric_threshold_100": 0.4444444638091759, "scr_dir2_threshold_100": 0.4444444638091759,
      "scr_dir1_threshold_500": -0.7586204061610641, "scr_metric_threshold_500": -0.37134511994824504, "scr_dir2_threshold_500": -0.37134511994824504
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.3312499022111093, "scr_metric_threshold_2": 0.13653135313966885, "scr_dir2_threshold_2": 0.13653135313966885,
      "scr_dir1_threshold_5": 0.47499994412063384, "scr_metric_threshold_5": 0.24354257420777517, "scr_dir2_threshold_5": 0.24354257420777517,
      "scr_dir1_threshold_10": 0.5500001117587323, "scr_metric_threshold_10": 0.2730627062793377, "scr_dir2_threshold_10": 0.2730627062793377,
      "scr_dir1_threshold_20": 0.599999850988357, "scr_metric_threshold_20": 0.21771221120892723, "scr_dir2_threshold_20": 0.21771221120892723,
      "scr_dir1_threshold_50": 0.6562498835846539, "scr_metric_threshold_50": 0.6309962596439646, "scr_dir2_threshold_50": 0.6309962596439646,
      "scr_dir1_threshold_100": 0.7000000745058215, "scr_metric_threshold_100": -0.2804426843113993, "scr_dir2_threshold_100": -0.2804426843113993,
      "scr_dir1_threshold_500": -1.0437505634502753, "scr_metric_threshold_500": -0.5018448845363573, "scr_dir2_threshold_500": -0.5018448845363573
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.16107369931906715, "scr_metric_threshold_2": 0.1753554314937911, "scr_dir2_threshold_2": 0.1753554314937911,
      "scr_dir1_threshold_5": 0.18120806174559145, "scr_metric_threshold_5": 0.29857827402483567, "scr_dir2_threshold_5": 0.29857827402483567,
      "scr_dir1_threshold_10": 0.25503339059110985, "scr_metric_threshold_10": 0.5023696374540854, "scr_dir2_threshold_10": 0.5023696374540854,
      "scr_dir1_threshold_20": 0.4228188107397134, "scr_metric_threshold_20": 0.6398105871960764, "scr_dir2_threshold_20": 0.6398105871960764,
      "scr_dir1_threshold_50": 0.3758387650881708, "scr_metric_threshold_50": 0.8767771574689555, "scr_dir2_threshold_50": 0.8767771574689555,
      "scr_dir1_threshold_100": -0.46979845636021356, "scr_metric_threshold_100": 1.0, "scr_dir2_threshold_100": 1.0,
      "scr_dir1_threshold_500": -0.31543607783964034, "scr_metric_threshold_500": 0.7867298042679746, "scr_dir2_threshold_500": 0.7867298042679746
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.27536220616640233, "scr_metric_threshold_2": 0.17924521405657382, "scr_dir2_threshold_2": 0.17924521405657382,
      "scr_dir1_threshold_5": 0.37681146274969163, "scr_metric_threshold_5": 0.3018868242815813, "scr_dir2_threshold_5": 0.3018868242815813,
      "scr_dir1_threshold_10": 0.4637681535000881, "scr_metric_threshold_10": 0.32075450478945783, "scr_dir2_threshold_10": 0.32075450478945783,
      "scr_dir1_threshold_20": 0.3550721820826725, "scr_metric_threshold_20": 0.4009432715638065, "scr_dir2_threshold_20": 0.4009432715638065,
      "scr_dir1_threshold_50": 0.5507244123328047, "scr_metric_threshold_50": 0.4811320383381551, "scr_dir2_threshold_50": 0.4811320383381551,
      "scr_dir1_threshold_100": -0.27536220616640233, "scr_metric_threshold_100": 0.8160375846624888, "scr_dir2_threshold_100": 0.8160375846624888,
      "scr_dir1_threshold_500": 0.0, "scr_metric_threshold_500": 0.5754715654934112, "scr_dir2_threshold_500": 0.5754715654934112
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.16535450439689736, "scr_metric_threshold_2": 0.09236948079569564, "scr_dir2_threshold_2": 0.09236948079569564,
      "scr_dir1_threshold_5": 0.18897624121946377, "scr_metric_threshold_5": 0.18473896159139128, "scr_dir2_threshold_5": 0.18473896159139128,
      "scr_dir1_threshold_10": 0.18110232894527498, "scr_metric_threshold_10": 0.11646579423663808, "scr_dir2_threshold_10": 0.11646579423663808,
      "scr_dir1_threshold_20": -0.05511785524710812, "scr_metric_threshold_20": 0.2610441536344349, "scr_dir2_threshold_20": 0.2610441536344349,
      "scr_dir1_threshold_50": -0.03937003069873052, "scr_metric_threshold_50": 0.43775109053753575, "scr_dir2_threshold_50": 0.43775109053753575,
      "scr_dir1_threshold_100": -0.04724394297291932, "scr_metric_threshold_100": 0.6345380891613627, "scr_dir2_threshold_100": 0.6345380891613627,
      "scr_dir1_threshold_500": -1.93700729382313, "scr_metric_threshold_500": -0.08032120438718889, "scr_dir2_threshold_500": -0.08032120438718889
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.1030304081183683, "scr_metric_threshold_2": -0.05000001655684511, "scr_dir2_threshold_2": -0.05000001655684511,
      "scr_dir1_threshold_5": 0.19999992775195513, "scr_metric_threshold_5": 0.05000001655684511, "scr_dir2_threshold_5": 0.05000001655684511,
      "scr_dir1_threshold_10": 0.266666449922532, "scr_metric_threshold_10": 0.04444453274761838, "scr_dir2_threshold_10": 0.04444453274761838,
      "scr_dir1_threshold_20": 0.32727280608877624, "scr_metric_threshold_20": 0.02222226637380919, "scr_dir2_threshold_20": 0.02222226637380919,
      "scr_dir1_threshold_50": 0.39999985550391026, "scr_metric_threshold_50": 0.3333333333333333, "scr_dir2_threshold_50": 0.3333333333333333,
      "scr_dir1_threshold_100": 0.1575755145596066, "scr_metric_threshold_100": 0.2166668487919629, "scr_dir2_threshold_100": 0.2166668487919629,
      "scr_dir1_threshold_500": -0.8363635969556119, "scr_metric_threshold_500": -0.3944443175086319, "scr_dir2_threshold_500": -0.3944443175086319
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_13",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post",
    "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "",
    "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720,
    "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000, "context_length": 128,
    "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20, "probe_lr": 0.001, "probe_l1_penalty": 0.001,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "856d59ed-598e-40bc-a47d-16a7f20bb0ad",
  "datetime_epoch_millis": 1736483670039,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.36569708960879344, "scr_metric_threshold_2": 0.10503078530101546, "scr_dir2_threshold_2": 0.10503078530101546,
      "scr_dir1_threshold_5": 0.43704600166092317, "scr_metric_threshold_5": 0.16611491723374155, "scr_dir2_threshold_5": 0.16611491723374155,
      "scr_dir1_threshold_10": 0.48905799640061126, "scr_metric_threshold_10": 0.20953824990667835, "scr_dir2_threshold_10": 0.20953824990667835,
      "scr_dir1_threshold_20": 0.4787097104525107, "scr_metric_threshold_20": 0.29005410865153947, "scr_dir2_threshold_20": 0.29005410865153947,
      "scr_dir1_threshold_50": 0.3750827950706698, "scr_metric_threshold_50": 0.4573134644166907, "scr_dir2_threshold_50": 0.4573134644166907,
      "scr_dir1_threshold_100": 0.35347457077812156, "scr_metric_threshold_100": 0.4905756615651292, "scr_dir2_threshold_100": 0.4905756615651292,
      "scr_dir1_threshold_500": -0.5897202638460304, "scr_metric_threshold_500": 0.20553954935382832, "scr_dir2_threshold_500": 0.20553954935382832
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5882351703710352, "scr_metric_threshold_2": 0.0, "scr_dir2_threshold_2": 0.0,
      "scr_dir1_threshold_5": 0.682352965925793, "scr_metric_threshold_5": 0.022535087825340604, "scr_dir2_threshold_5": 0.022535087825340604,
      "scr_dir1_threshold_10": 0.7058820641990823, "scr_metric_threshold_10": 0.07323932925805485, "scr_dir2_threshold_10": 0.07323932925805485,
      "scr_dir1_threshold_20": 0.6470586172850588, "scr_metric_threshold_20": 0.14366196142589077, "scr_dir2_threshold_20": 0.14366196142589077,
      "scr_dir1_threshold_50": 0.6588231664217034, "scr_metric_threshold_50": 0.1915491699675885, "scr_dir2_threshold_50": 0.1915491699675885,
      "scr_dir1_threshold_100": 0.45882372740634364, "scr_metric_threshold_100": 0.05352110642333198, "scr_dir2_threshold_100": 0.05352110642333198,
      "scr_dir1_threshold_500": -1.2705874350660278, "scr_metric_threshold_500": 0.25352103926317243, "scr_dir2_threshold_500": 0.25352103926317243
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.762295170068692, "scr_metric_threshold_2": 0.09904158404378066, "scr_dir2_threshold_2": 0.09904158404378066,
      "scr_dir1_threshold_5": 0.7704917872602378, "scr_metric_threshold_5": 0.16613431915218968, "scr_dir2_threshold_5": 0.16613431915218968,
      "scr_dir1_threshold_10": 0.7950821273976216, "scr_metric_threshold_10": 0.24281159468311375, "scr_dir2_threshold_10": 0.24281159468311375,
      "scr_dir1_threshold_20": 0.8524589363011892, "scr_metric_threshold_20": 0.3290736010667136, "scr_dir2_threshold_20": 0.3290736010667136,
      "scr_dir1_threshold_50": 0.8360657019180974, "scr_metric_threshold_50": 0.5079873074489233, "scr_dir2_threshold_50": 0.5079873074489233,
      "scr_dir1_threshold_100": 0.8688526592470271, "scr_metric_threshold_100": 0.6677317425559421, "scr_dir2_threshold_100": 0.6677317425559421,
      "scr_dir1_threshold_500": 0.47540965986261613, "scr_metric_threshold_500": -0.28115013723349536, "scr_dir2_threshold_500": -0.28115013723349536
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5057473941812173, "scr_metric_threshold_2": 0.10526319458580699, "scr_dir2_threshold_2": 0.10526319458580699,
      "scr_dir1_threshold_5": 0.6091956936677952, "scr_metric_threshold_5": 0.11695912446007256, "scr_dir2_threshold_5": 0.11695912446007256,
      "scr_dir1_threshold_10": 0.5747126988020154, "scr_metric_threshold_10": 0.15497067869820672, "scr_dir2_threshold_10": 0.15497067869820672,
      "scr_dir1_threshold_20": 0.4252873011979847, "scr_metric_threshold_20": 0.1871345294230828, "scr_dir2_threshold_20": 0.1871345294230828,
      "scr_dir1_threshold_50": -0.3678160998288593, "scr_metric_threshold_50": 0.26900586426035866, "scr_dir2_threshold_50": 0.26900586426035866,
      "scr_dir1_threshold_100": -0.2528730119798468, "scr_metric_threshold_100": 0.36549706786982067, "scr_dir2_threshold_100": 0.36549706786982067,
      "scr_dir1_threshold_500": -2.6091950085570335, "scr_metric_threshold_500": -0.3362573303254483, "scr_dir2_threshold_500": -0.3362573303254483
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.4062500698492077, "scr_metric_threshold_2": 0.08856083610131997, "scr_dir2_threshold_2": 0.08856083610131997,
      "scr_dir1_threshold_5": 0.43750004656613845, "scr_metric_threshold_5": 0.12915137510760727, "scr_dir2_threshold_5": 0.12915137510760727,
      "scr_dir1_threshold_10": 0.4062500698492077, "scr_metric_threshold_10": 0.16236171613851677, "scr_dir2_threshold_10": 0.16236171613851677,
      "scr_dir1_threshold_20": 0.36249987892804003, "scr_metric_threshold_20": 0.17712189214595614, "scr_dir2_threshold_20": 0.17712189214595614,
      "scr_dir1_threshold_50": 0.08124971594655549, "scr_metric_threshold_50": 0.6678965896909048, "scr_dir2_threshold_50": 0.6678965896909048,
      "scr_dir1_threshold_100": -0.2625000279396831, "scr_metric_threshold_100": 0.645756435651404, "scr_dir2_threshold_100": 0.645756435651404,
      "scr_dir1_threshold_500": -0.5312503492460383, "scr_metric_threshold_500": -0.20295203520148788, "scr_dir2_threshold_500": -0.20295203520148788
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.06711400804702444, "scr_metric_threshold_2": 0.3554504203821872, "scr_dir2_threshold_2": 0.3554504203821872,
      "scr_dir1_threshold_5": 0.20134242417211576, "scr_metric_threshold_5": 0.43601894128039237, "scr_dir2_threshold_5": 0.43601894128039237,
      "scr_dir1_threshold_10": 0.2617451114206462, "scr_metric_threshold_10": 0.4597155983076803, "scr_dir2_threshold_10": 0.4597155983076803,
      "scr_dir1_threshold_20": 0.16107369931906715, "scr_metric_threshold_20": 0.47393370551862674, "scr_dir2_threshold_20": 0.47393370551862674,
      "scr_dir1_threshold_50": 0.29530211544415846, "scr_metric_threshold_50": 0.691943176158823, "scr_dir2_threshold_50": 0.691943176158823,
      "scr_dir1_threshold_100": 0.41610748994121943, "scr_metric_threshold_100": 0.7535545974243452, "scr_dir2_threshold_100": 0.7535545974243452,
      "scr_dir1_threshold_500": 0.20805374497060972, "scr_metric_threshold_500": 0.7677724221488573, "scr_dir2_threshold_500": 0.7677724221488573
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.24637664258293684, "scr_metric_threshold_2": 0.09433952715525604, "scr_dir2_threshold_2": 0.09433952715525604,
      "scr_dir1_threshold_5": 0.36231889691679886, "scr_metric_threshold_5": 0.11320748881710091, "scr_dir2_threshold_5": 0.11320748881710091,
      "scr_dir1_threshold_10": 0.5217388487493392, "scr_metric_threshold_10": 0.23113189776117096, "scr_dir2_threshold_10": 0.23113189776117096,
      "scr_dir1_threshold_20": 0.5652174100833773, "scr_metric_threshold_20": 0.4103773929717131, "scr_dir2_threshold_20": 0.4103773929717131,
      "scr_dir1_threshold_50": 0.5507244123328047, "scr_metric_threshold_50": 0.570754645366442, "scr_dir2_threshold_50": 0.570754645366442,
      "scr_dir1_threshold_100": 0.5652174100833773, "scr_metric_threshold_100": 0.6084905686901317, "scr_dir2_threshold_100": 0.6084905686901317,
      "scr_dir1_threshold_500": 0.5, "scr_metric_threshold_500": 0.5613208051125037, "scr_dir2_threshold_500": 0.5613208051125037
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.2283462719181943, "scr_metric_threshold_2": 0.06425715501060796, "scr_dir2_threshold_2": 0.06425715501060796,
      "scr_dir1_threshold_5": 0.2755906842189001, "scr_metric_threshold_5": 0.17269068518288452, "scr_dir2_threshold_5": 0.17269068518288452,
      "scr_dir1_threshold_10": 0.36220465789054995, "scr_metric_threshold_10": 0.2409638525376377, "scr_dir2_threshold_10": 0.2409638525376377,
      "scr_dir1_threshold_20": 0.46456692543836386, "scr_metric_threshold_20": 0.393574236623725, "scr_dir2_threshold_20": 0.393574236623725,
      "scr_dir1_threshold_50": 0.5826770175345555, "scr_metric_threshold_50": 0.4538153792901877, "scr_dir2_threshold_50": 0.4538153792901877,
      "scr_dir1_threshold_100": 0.6220470482332859, "scr_metric_threshold_100": 0.45783139163433295, "scr_dir2_threshold_100": 0.45783139163433295,
      "scr_dir1_threshold_500": -1.1811018596174885, "scr_metric_threshold_500": 0.6265060644730722, "scr_dir2_threshold_500": 0.6265060644730722
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.12121198985203961, "scr_metric_threshold_2": 0.03333356512916492, "scr_dir2_threshold_2": 0.03333356512916492,
      "scr_dir1_threshold_5": 0.1575755145596066, "scr_metric_threshold_5": 0.17222231604434454, "scr_dir2_threshold_5": 0.17222231604434454,
      "scr_dir1_threshold_10": 0.2848483928964277, "scr_metric_threshold_10": 0.11111133186904595, "scr_dir2_threshold_10": 0.11111133186904595,
      "scr_dir1_threshold_20": 0.35151491506700466, "scr_metric_threshold_20": 0.2055555500366072, "scr_dir2_threshold_20": 0.2055555500366072,
      "scr_dir1_threshold_50": 0.3636363307963432, "scr_metric_threshold_50": 0.3055555831502974, "scr_dir2_threshold_50": 0.3055555831502974,
      "scr_dir1_threshold_100": 0.4121212712332488, "scr_metric_threshold_100": 0.372222382271725, "scr_dir2_threshold_100": 0.372222382271725,
      "scr_dir1_threshold_500": -0.30909086311488054, "scr_metric_threshold_500": 0.2555555665934523, "scr_dir2_threshold_500": 0.2555555665934523
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_14",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post",
    "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "",
    "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720,
    "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_15_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000, "context_length": 128,
    "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20, "probe_lr": 0.001, "probe_l1_penalty": 0.001,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "d00cc5fe-8ee0-4126-84b0-f234c2d008c5",
  "datetime_epoch_millis": 1736483815725,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.28757982989161285, "scr_metric_threshold_2": 0.10751737803417191, "scr_dir2_threshold_2": 0.10751737803417191,
      "scr_dir1_threshold_5": 0.2791551344018466, "scr_metric_threshold_5": 0.18267142047455479, "scr_dir2_threshold_5": 0.18267142047455479,
      "scr_dir1_threshold_10": 0.26353423030120016, "scr_metric_threshold_10": 0.21839308266607887, "scr_dir2_threshold_10": 0.21839308266607887,
      "scr_dir1_threshold_20": 0.2966192170580767, "scr_metric_threshold_20": 0.2808313513664364, "scr_dir2_threshold_20": 0.2808313513664364,
      "scr_dir1_threshold_50": 0.2481317450699059, "scr_metric_threshold_50": 0.35956264795323606, "scr_dir2_threshold_50": 0.35956264795323606,
      "scr_dir1_threshold_100": -0.001958568032063122, "scr_metric_threshold_100": 0.40504448638971413, "scr_dir2_threshold_100": 0.40504448638971413,
      "scr_dir1_threshold_500": -0.4079902712707976, "scr_metric_threshold_500": -0.07656992348531377, "scr_dir2_threshold_500": -0.07656992348531377
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.49411737481627754, "scr_metric_threshold_2": 0.016901357844105153, "scr_dir2_threshold_2": 0.016901357844105153,
      "scr_dir1_threshold_5": 0.6117649698751249, "scr_metric_threshold_5": 0.030985850697592576, "scr_dir2_threshold_5": 0.030985850697592576,
      "scr_dir1_threshold_10": 0.5764706212343906, "scr_metric_threshold_10": 0.08169009213030683, "scr_dir2_threshold_10": 0.08169009213030683,
      "scr_dir1_threshold_20": 0.435293927902254, "scr_metric_threshold_20": 0.14929569140712623, "scr_dir2_threshold_20": 0.14929569140712623,
      "scr_dir1_threshold_50": 0.5882351703710352, "scr_metric_threshold_50": -0.030986018597991372, "scr_dir2_threshold_50": -0.030986018597991372,
      "scr_dir1_threshold_100": 0.435293927902254, "scr_metric_threshold_100": -0.005633897881634249, "scr_dir2_threshold_100": -0.005633897881634249,
      "scr_dir1_threshold_500": -0.10588234469140236, "scr_metric_threshold_500": 0.033802715688210305, "scr_dir2_threshold_500": 0.033802715688210305
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.6803280210277406, "scr_metric_threshold_2": 0.10223643085128566, "scr_dir2_threshold_2": 0.10223643085128566,
      "scr_dir1_threshold_5": 0.762295170068692, "scr_metric_threshold_5": 0.23642171063794296, "scr_dir2_threshold_5": 0.23642171063794296,
      "scr_dir1_threshold_10": 0.7868855102060758, "scr_metric_threshold_10": 0.3258785638290478, "scr_dir2_threshold_10": 0.3258785638290478,
      "scr_dir1_threshold_20": 0.7786888930145298, "scr_metric_threshold_20": 0.40575087659763764, "scr_dir2_threshold_20": 0.40575087659763764,
      "scr_dir1_threshold_50": 0.8360657019180974, "scr_metric_threshold_50": 0.6006390074475332, "scr_dir2_threshold_50": 0.6006390074475332,
      "scr_dir1_threshold_100": 0.5327869573289298, "scr_metric_threshold_100": 0.686901013831133, "scr_dir2_threshold_100": 0.686901013831133,
      "scr_dir1_threshold_500": -0.3114753617807135, "scr_metric_threshold_500": -0.2875398308485054, "scr_dir2_threshold_500": -0.2875398308485054
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.47126439931543734, "scr_metric_threshold_2": 0.07602345704143464, "scr_dir2_threshold_2": 0.07602345704143464,
      "scr_dir1_threshold_5": 0.5402303890469973, "scr_metric_threshold_5": 0.12865488005175496, "scr_dir2_threshold_5": 0.12865488005175496,
      "scr_dir1_threshold_10": 0.2988507952080613, "scr_metric_threshold_10": 0.15789479187871047, "scr_dir2_threshold_10": 0.15789479187871047,
      "scr_dir1_threshold_20": 0.1954024957214833, "scr_metric_threshold_20": 0.21637426696745518, "scr_dir2_threshold_20": 0.21637426696745518,
      "scr_dir1_threshold_50": 0.2873566919563886, "scr_metric_threshold_50": 0.3040934796005722, "scr_dir2_threshold_50": 0.3040934796005722,
      "scr_dir1_threshold_100": -1.1954018106107214, "scr_metric_threshold_100": 0.4181286651627242, "scr_dir2_threshold_100": 0.4181286651627242,
      "scr_dir1_threshold_500": -3.2873560068456267, "scr_metric_threshold_500": -0.3333333914275277, "scr_dir2_threshold_500": -0.3333333914275277
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.2999999254941785, "scr_metric_threshold_2": 0.09963102309272855, "scr_dir2_threshold_2": 0.09963102309272855,
      "scr_dir1_threshold_5": 0.36249987892804003, "scr_metric_threshold_5": 0.16974169417057836, "scr_dir2_threshold_5": 0.16974169417057836,
      "scr_dir1_threshold_10": 0.18124993946402002, "scr_metric_threshold_10": 0.2546125412558675, "scr_dir2_threshold_10": 0.2546125412558675,
      "scr_dir1_threshold_20": 0.3750000931322769, "scr_metric_threshold_20": 0.39483388341156717, "scr_dir2_threshold_20": 0.39483388341156717,
      "scr_dir1_threshold_50": -0.22500013038518765, "scr_metric_threshold_50": 0.535055445510583, "scr_dir2_threshold_50": 0.535055445510583,
      "scr_dir1_threshold_100": -0.30000029802328604, "scr_metric_threshold_100": 0.690036963673722, "scr_dir2_threshold_100": 0.690036963673722,
      "scr_dir1_threshold_500": -0.01875013504180149, "scr_metric_threshold_500": -0.6678965896909048, "scr_dir2_threshold_500": -0.6678965896909048
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.060402687248530484, "scr_metric_threshold_2": 0.38388635231764584, "scr_dir2_threshold_2": 0.38388635231764584,
      "scr_dir1_threshold_5": -0.8724829046734026, "scr_metric_threshold_5": 0.4597155983076803, "scr_dir2_threshold_5": 0.4597155983076803,
      "scr_dir1_threshold_10": -0.8187915382233661, "scr_metric_threshold_10": 0.36018969529035794, "scr_dir2_threshold_10": 0.36018969529035794,
      "scr_dir1_threshold_20": -0.7382548885793537, "scr_metric_threshold_20": 0.41232228425310447, "scr_dir2_threshold_20": 0.41232228425310447,
      "scr_dir1_threshold_50": -0.7919458549983479, "scr_metric_threshold_50": 0.7061612833697694, "scr_dir2_threshold_50": 0.7061612833697694,
      "scr_dir1_threshold_100": -0.6040264724542624, "scr_metric_threshold_100": 0.7772512544516331, "scr_dir2_threshold_100": 0.7772512544516331,
      "scr_dir1_threshold_500": -0.20805334493956731, "scr_metric_threshold_500": 0.6966824510669937, "scr_dir2_threshold_500": 0.6966824510669937
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.15217366891609393, "scr_metric_threshold_2": 0.08018848562038029, "scr_dir2_threshold_2": 0.08018848562038029,
      "scr_dir1_threshold_5": 0.3188403355827606, "scr_metric_threshold_5": 0.14622621085985318, "scr_dir2_threshold_5": 0.14622621085985318,
      "scr_dir1_threshold_10": 0.4275363070001762, "scr_metric_threshold_10": 0.22169805750723268, "scr_dir2_threshold_10": 0.22169805750723268,
      "scr_dir1_threshold_20": 0.5434781294163583, "scr_metric_threshold_20": 0.33018862619736444, "scr_dir2_threshold_20": 0.33018862619736444,
      "scr_dir1_threshold_50": 0.5652174100833773, "scr_metric_threshold_50": 0.35377350798617846, "scr_dir2_threshold_50": 0.35377350798617846,
      "scr_dir1_threshold_100": 0.615941822416182, "scr_metric_threshold_100": 0.5566036038315663, "scr_dir2_threshold_100": 0.5566036038315663,
      "scr_dir1_threshold_500": -0.02173928066701911, "scr_metric_threshold_500": 0.5613208051125037, "scr_dir2_threshold_500": 0.5613208051125037
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.11811009209619154, "scr_metric_threshold_2": 0.04016060219359444, "scr_dir2_threshold_2": 0.04016060219359444,
      "scr_dir1_threshold_5": 0.34645683334217237, "scr_metric_threshold_5": 0.12851407064514483, "scr_dir2_threshold_5": 0.12851407064514483,
      "scr_dir1_threshold_10": 0.4803147499867415, "scr_metric_threshold_10": 0.12851407064514483, "scr_dir2_threshold_10": 0.12851407064514483,
      "scr_dir1_threshold_20": 0.4803147499867415, "scr_metric_threshold_20": 0.24899611660199922, "scr_dir2_threshold_20": 0.24899611660199922,
      "scr_dir1_threshold_50": 0.5433069868358249, "scr_metric_threshold_50": 0.38554221193543453, "scr_dir2_threshold_50": 0.38554221193543453,
      "scr_dir1_threshold_100": 0.43307080701382217, "scr_metric_threshold_100": 0.43373483881731945, "scr_dir2_threshold_100": 0.43373483881731945,
      "scr_dir1_threshold_500": 0.5984253114107195, "scr_metric_threshold_500": 0.1566266358063036, "scr_dir2_threshold_500": 0.1566266358063036
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.0242424702184528, "scr_metric_threshold_2": 0.06111131531220084, "scr_dir2_threshold_2": 0.06111131531220084,
      "scr_dir1_threshold_5": 0.16363640304438812, "scr_metric_threshold_5": 0.16111134842589106, "scr_dir2_threshold_5": 0.16111134842589106,
      "scr_dir1_threshold_10": 0.17575745753350233, "scr_metric_threshold_10": 0.2166668487919629, "scr_dir2_threshold_10": 0.2166668487919629,
      "scr_dir1_threshold_20": 0.3030303358703234, "scr_metric_threshold_20": 0.08888906549523676, "scr_dir2_threshold_20": 0.08888906549523676,
      "scr_dir1_threshold_50": 0.18181798477805944, "scr_metric_threshold_50": 0.02222226637380919,
+
"scr_dir2_threshold_50": 0.02222226637380919,
|
285 |
+
"scr_dir1_threshold_100": 0.0666665221705769,
|
286 |
+
"scr_metric_threshold_100": -0.3166665507687509,
|
287 |
+
"scr_dir2_threshold_100": -0.3166665507687509,
|
288 |
+
"scr_dir1_threshold_500": 0.09090899238902972,
|
289 |
+
"scr_metric_threshold_500": -0.7722221835895836,
|
290 |
+
"scr_dir2_threshold_500": -0.7722221835895836
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_15",
|
296 |
+
"sae_lens_version": "5.3.0",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "gated",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_16_eval_results.json
ADDED
@@ -0,0 +1,323 @@
1 |
+
{
|
2 |
+
"eval_type_id": "scr",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"canrager/amazon_reviews_mcauley_1and5"
|
8 |
+
],
|
9 |
+
"perform_scr": true,
|
10 |
+
"early_stopping_patience": 20,
|
11 |
+
"train_set_size": 4000,
|
12 |
+
"test_set_size": 1000,
|
13 |
+
"context_length": 128,
|
14 |
+
"probe_train_batch_size": 16,
|
15 |
+
"probe_test_batch_size": 500,
|
16 |
+
"probe_epochs": 20,
|
17 |
+
"probe_lr": 0.001,
|
18 |
+
"probe_l1_penalty": 0.001,
|
19 |
+
"sae_batch_size": 125,
|
20 |
+
"llm_batch_size": 256,
|
21 |
+
"llm_dtype": "float32",
|
22 |
+
"lower_vram_usage": false,
|
23 |
+
"model_name": "pythia-160m-deduped",
|
24 |
+
"n_values": [
|
25 |
+
2,
|
26 |
+
5,
|
27 |
+
10,
|
28 |
+
20,
|
29 |
+
50,
|
30 |
+
100,
|
31 |
+
500
|
32 |
+
],
|
33 |
+
"column1_vals_lookup": {
|
34 |
+
"LabHC/bias_in_bios_class_set1": [
|
35 |
+
[
|
36 |
+
"professor",
|
37 |
+
"nurse"
|
38 |
+
],
|
39 |
+
[
|
40 |
+
"architect",
|
41 |
+
"journalist"
|
42 |
+
],
|
43 |
+
[
|
44 |
+
"surgeon",
|
45 |
+
"psychologist"
|
46 |
+
],
|
47 |
+
[
|
48 |
+
"attorney",
|
49 |
+
"teacher"
|
50 |
+
]
|
51 |
+
],
|
52 |
+
"canrager/amazon_reviews_mcauley_1and5": [
|
53 |
+
[
|
54 |
+
"Books",
|
55 |
+
"CDs_and_Vinyl"
|
56 |
+
],
|
57 |
+
[
|
58 |
+
"Software",
|
59 |
+
"Electronics"
|
60 |
+
],
|
61 |
+
[
|
62 |
+
"Pet_Supplies",
|
63 |
+
"Office_Products"
|
64 |
+
],
|
65 |
+
[
|
66 |
+
"Industrial_and_Scientific",
|
67 |
+
"Toys_and_Games"
|
68 |
+
]
|
69 |
+
]
|
70 |
+
}
|
71 |
+
},
|
72 |
+
"eval_id": "f6c8975b-3f28-4c1c-b415-fcfaccd696e2",
|
73 |
+
"datetime_epoch_millis": 1736483960501,
|
74 |
+
"eval_result_metrics": {
|
75 |
+
"scr_metrics": {
|
76 |
+
"scr_dir1_threshold_2": 0.30832928774665064,
|
77 |
+
"scr_metric_threshold_2": 0.13517142313867364,
|
78 |
+
"scr_dir2_threshold_2": 0.13517142313867364,
|
79 |
+
"scr_dir1_threshold_5": 0.3442244458388896,
|
80 |
+
"scr_metric_threshold_5": 0.1851152312353862,
|
81 |
+
"scr_dir2_threshold_5": 0.1851152312353862,
|
82 |
+
"scr_dir1_threshold_10": 0.2854095650675326,
|
83 |
+
"scr_metric_threshold_10": 0.22572898695392946,
|
84 |
+
"scr_dir2_threshold_10": 0.22572898695392946,
|
85 |
+
"scr_dir1_threshold_20": 0.3312863341440525,
|
86 |
+
"scr_metric_threshold_20": 0.2703892753309681,
|
87 |
+
"scr_dir2_threshold_20": 0.2703892753309681,
|
88 |
+
"scr_dir1_threshold_50": 0.28045349475064696,
|
89 |
+
"scr_metric_threshold_50": 0.33770788199293783,
|
90 |
+
"scr_dir2_threshold_50": 0.33770788199293783,
|
91 |
+
"scr_dir1_threshold_100": -0.08020961470151253,
|
92 |
+
"scr_metric_threshold_100": 0.2641720071102462,
|
93 |
+
"scr_dir2_threshold_100": 0.2641720071102462,
|
94 |
+
"scr_dir1_threshold_500": -0.7535685608928296,
|
95 |
+
"scr_metric_threshold_500": -0.07348461852423942,
|
96 |
+
"scr_dir2_threshold_500": -0.07348461852423942
|
97 |
+
}
|
98 |
+
},
|
99 |
+
"eval_result_details": [
|
100 |
+
{
|
101 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
|
102 |
+
"scr_dir1_threshold_2": 0.5647060720977459,
|
103 |
+
"scr_metric_threshold_2": 0.022535087825340604,
|
104 |
+
"scr_dir2_threshold_2": 0.022535087825340604,
|
105 |
+
"scr_dir1_threshold_5": 0.5999997195076798,
|
106 |
+
"scr_metric_threshold_5": 0.033802715688210305,
|
107 |
+
"scr_dir2_threshold_5": 0.033802715688210305,
|
108 |
+
"scr_dir1_threshold_10": 0.5647060720977459,
|
109 |
+
"scr_metric_threshold_10": 0.10985907783728167,
|
110 |
+
"scr_dir2_threshold_10": 0.10985907783728167,
|
111 |
+
"scr_dir1_threshold_20": 0.49411737481627754,
|
112 |
+
"scr_metric_threshold_20": 0.1014084828654285,
|
113 |
+
"scr_dir2_threshold_20": 0.1014084828654285,
|
114 |
+
"scr_dir1_threshold_50": 0.435293927902254,
|
115 |
+
"scr_metric_threshold_50": -0.05352110642333198,
|
116 |
+
"scr_dir2_threshold_50": -0.05352110642333198,
|
117 |
+
"scr_dir1_threshold_100": 0.435293927902254,
|
118 |
+
"scr_metric_threshold_100": -0.04788737644209653,
|
119 |
+
"scr_dir2_threshold_100": -0.04788737644209653,
|
120 |
+
"scr_dir1_threshold_500": 0.12941214419549193,
|
121 |
+
"scr_metric_threshold_500": 0.022535087825340604,
|
122 |
+
"scr_dir2_threshold_500": 0.022535087825340604
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
|
126 |
+
"scr_dir1_threshold_2": 0.7049183611651244,
|
127 |
+
"scr_metric_threshold_2": 0.1469648574468381,
|
128 |
+
"scr_dir2_threshold_2": 0.1469648574468381,
|
129 |
+
"scr_dir1_threshold_5": 0.6557376808903568,
|
130 |
+
"scr_metric_threshold_5": 0.21405759255524712,
|
131 |
+
"scr_dir2_threshold_5": 0.21405759255524712,
|
132 |
+
"scr_dir1_threshold_10": 0.622950723561427,
|
133 |
+
"scr_metric_threshold_10": 0.3514377191494094,
|
134 |
+
"scr_dir2_threshold_10": 0.3514377191494094,
|
135 |
+
"scr_dir1_threshold_20": 0.6721314038361946,
|
136 |
+
"scr_metric_threshold_20": 0.41533560745031345,
|
137 |
+
"scr_dir2_threshold_20": 0.41533560745031345,
|
138 |
+
"scr_dir1_threshold_50": 0.6885246382192866,
|
139 |
+
"scr_metric_threshold_50": 0.6293930095753998,
|
140 |
+
"scr_dir2_threshold_50": 0.6293930095753998,
|
141 |
+
"scr_dir1_threshold_100": 0.5819671490409514,
|
142 |
+
"scr_metric_threshold_100": 0.6645367053182764,
|
143 |
+
"scr_dir2_threshold_100": 0.6645367053182764,
|
144 |
+
"scr_dir1_threshold_500": -1.4426231910964324,
|
145 |
+
"scr_metric_threshold_500": -0.3258785638290478,
|
146 |
+
"scr_dir2_threshold_500": -0.3258785638290478
|
147 |
+
},
|
148 |
+
{
|
149 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
|
150 |
+
"scr_dir1_threshold_2": 0.4827591876778719,
|
151 |
+
"scr_metric_threshold_2": 0.07894739593935524,
|
152 |
+
"scr_dir2_threshold_2": 0.07894739593935524,
|
153 |
+
"scr_dir1_threshold_5": 0.5287356006845627,
|
154 |
+
"scr_metric_threshold_5": 0.11403501127956878,
|
155 |
+
"scr_dir2_threshold_5": 0.11403501127956878,
|
156 |
+
"scr_dir1_threshold_10": 0.47126439931543734,
|
157 |
+
"scr_metric_threshold_10": 0.1461988620044449,
|
158 |
+
"scr_dir2_threshold_10": 0.1461988620044449,
|
159 |
+
"scr_dir1_threshold_20": 0.40229909469463926,
|
160 |
+
"scr_metric_threshold_20": 0.1871345294230828,
|
161 |
+
"scr_dir2_threshold_20": 0.1871345294230828,
|
162 |
+
"scr_dir1_threshold_50": 0.03448299486577995,
|
163 |
+
"scr_metric_threshold_50": 0.3362573303254483,
|
164 |
+
"scr_dir2_threshold_50": 0.3362573303254483,
|
165 |
+
"scr_dir1_threshold_100": -2.068965304620798,
|
166 |
+
"scr_metric_threshold_100": 0.12865488005175496,
|
167 |
+
"scr_dir2_threshold_100": 0.12865488005175496,
|
168 |
+
"scr_dir1_threshold_500": -3.632183215060379,
|
169 |
+
"scr_metric_threshold_500": -0.31871352265534153,
|
170 |
+
"scr_dir2_threshold_500": -0.31871352265534153
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": 0.3750000931322769,
|
175 |
+
"scr_metric_threshold_2": 0.10701122106810633,
|
176 |
+
"scr_dir2_threshold_2": 0.10701122106810633,
|
177 |
+
"scr_dir1_threshold_5": 0.33749982304867393,
|
178 |
+
"scr_metric_threshold_5": 0.13284136412363806,
|
179 |
+
"scr_dir2_threshold_5": 0.13284136412363806,
|
180 |
+
"scr_dir1_threshold_10": 0.4124999906867723,
|
181 |
+
"scr_metric_threshold_10": 0.16974169417057836,
|
182 |
+
"scr_dir2_threshold_10": 0.16974169417057836,
|
183 |
+
"scr_dir1_threshold_20": 0.36249987892804003,
|
184 |
+
"scr_metric_threshold_20": 0.3394833883411567,
|
185 |
+
"scr_dir2_threshold_20": 0.3394833883411567,
|
186 |
+
"scr_dir1_threshold_50": 0.4124999906867723,
|
187 |
+
"scr_metric_threshold_50": 0.5055350934957042,
|
188 |
+
"scr_dir2_threshold_50": 0.5055350934957042,
|
189 |
+
"scr_dir1_threshold_100": -0.05625003259629691,
|
190 |
+
"scr_metric_threshold_100": 0.6162360836365252,
|
191 |
+
"scr_dir2_threshold_100": 0.6162360836365252,
|
192 |
+
"scr_dir1_threshold_500": -0.2812501629814846,
|
193 |
+
"scr_metric_threshold_500": -0.22140220022495802,
|
194 |
+
"scr_dir2_threshold_500": -0.22140220022495802
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": 0.08053704967505479,
|
199 |
+
"scr_metric_threshold_2": 0.4265403914640509,
|
200 |
+
"scr_dir2_threshold_2": 0.4265403914640509,
|
201 |
+
"scr_dir1_threshold_5": 0.12080537449706097,
|
202 |
+
"scr_metric_threshold_5": 0.4928910876377439,
|
203 |
+
"scr_dir2_threshold_5": 0.4928910876377439,
|
204 |
+
"scr_dir1_threshold_10": -0.6174495140822928,
|
205 |
+
"scr_metric_threshold_10": 0.49763036254591464,
|
206 |
+
"scr_dir2_threshold_10": 0.49763036254591464,
|
207 |
+
"scr_dir1_threshold_20": -0.422818410708671,
|
208 |
+
"scr_metric_threshold_20": 0.5687203336277784,
|
209 |
+
"scr_dir2_threshold_20": 0.5687203336277784,
|
210 |
+
"scr_dir1_threshold_50": -0.4966441395852318,
|
211 |
+
"scr_metric_threshold_50": 0.47867298042679746,
|
212 |
+
"scr_dir2_threshold_50": 0.47867298042679746,
|
213 |
+
"scr_dir1_threshold_100": -0.5167781019807137,
|
214 |
+
"scr_metric_threshold_100": 0.199052088521079,
|
215 |
+
"scr_dir2_threshold_100": 0.199052088521079,
|
216 |
+
"scr_dir1_threshold_500": -1.429529731507165,
|
217 |
+
"scr_metric_threshold_500": 0.06161142126552228,
|
218 |
+
"scr_dir2_threshold_500": 0.06161142126552228
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.17391294958311307,
|
223 |
+
"scr_metric_threshold_2": 0.14622621085985318,
|
224 |
+
"scr_dir2_threshold_2": 0.14622621085985318,
|
225 |
+
"scr_dir1_threshold_5": 0.2681159232499559,
|
226 |
+
"scr_metric_threshold_5": 0.1981131757184187,
|
227 |
+
"scr_dir2_threshold_5": 0.1981131757184187,
|
228 |
+
"scr_dir1_threshold_10": 0.37681146274969163,
|
229 |
+
"scr_metric_threshold_10": 0.3160375846624887,
|
230 |
+
"scr_dir2_threshold_10": 0.3160375846624887,
|
231 |
+
"scr_dir1_threshold_20": 0.5289855635834655,
|
232 |
+
"scr_metric_threshold_20": 0.26415090095789157,
|
233 |
+
"scr_dir2_threshold_20": 0.26415090095789157,
|
234 |
+
"scr_dir1_threshold_50": 0.4637681535000881,
|
235 |
+
"scr_metric_threshold_50": 0.4056601916907756,
|
236 |
+
"scr_dir2_threshold_50": 0.4056601916907756,
|
237 |
+
"scr_dir1_threshold_100": 0.05072441233280467,
|
238 |
+
"scr_metric_threshold_100": 0.48584895846512427,
|
239 |
+
"scr_dir2_threshold_100": 0.48584895846512427,
|
240 |
+
"scr_dir1_threshold_500": 0.19565223025013218,
|
241 |
+
"scr_metric_threshold_500": 0.4292450734795896,
|
242 |
+
"scr_dir2_threshold_500": 0.4292450734795896
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.07874006139746104,
|
247 |
+
"scr_metric_threshold_2": 0.06425715501060796,
|
248 |
+
"scr_dir2_threshold_2": 0.06425715501060796,
|
249 |
+
"scr_dir1_threshold_5": 0.21259844736981667,
|
250 |
+
"scr_metric_threshold_5": 0.12851407064514483,
|
251 |
+
"scr_dir2_threshold_5": 0.12851407064514483,
|
252 |
+
"scr_dir1_threshold_10": 0.38582686404090283,
|
253 |
+
"scr_metric_threshold_10": 0.12048204595685438,
|
254 |
+
"scr_dir2_threshold_10": 0.12048204595685438,
|
255 |
+
"scr_dir1_threshold_20": 0.42519689473963335,
|
256 |
+
"scr_metric_threshold_20": 0.16465866049459404,
|
257 |
+
"scr_dir2_threshold_20": 0.16465866049459404,
|
258 |
+
"scr_dir1_threshold_50": 0.5905509298087442,
|
259 |
+
"scr_metric_threshold_50": 0.30522100754824566,
|
260 |
+
"scr_dir2_threshold_50": 0.30522100754824566,
|
261 |
+
"scr_dir1_threshold_100": 0.6535431666578276,
|
262 |
+
"scr_metric_threshold_100": 0.2891567187955937,
|
263 |
+
"scr_dir2_threshold_100": 0.2891567187955937,
|
264 |
+
"scr_dir1_threshold_500": 0.2440945657943584,
|
265 |
+
"scr_metric_threshold_500": 0.23694784019349246,
|
266 |
+
"scr_dir2_threshold_500": 0.23694784019349246
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": 0.006060527244557103,
|
271 |
+
"scr_metric_threshold_2": 0.08888906549523676,
|
272 |
+
"scr_dir2_threshold_2": 0.08888906549523676,
|
273 |
+
"scr_dir1_threshold_5": 0.030302997463009903,
|
274 |
+
"scr_metric_threshold_5": 0.1666668322351178,
|
275 |
+
"scr_dir2_threshold_5": 0.1666668322351178,
|
276 |
+
"scr_dir1_threshold_10": 0.0666665221705769,
|
277 |
+
"scr_metric_threshold_10": 0.0944445493044635,
|
278 |
+
"scr_dir2_threshold_10": 0.0944445493044635,
|
279 |
+
"scr_dir1_threshold_20": 0.18787887326284092,
|
280 |
+
"scr_metric_threshold_20": 0.12222229948749942,
|
281 |
+
"scr_dir2_threshold_20": 0.12222229948749942,
|
282 |
+
"scr_dir1_threshold_50": 0.11515146260748252,
|
283 |
+
"scr_metric_threshold_50": 0.0944445493044635,
|
284 |
+
"scr_dir2_threshold_50": 0.0944445493044635,
|
285 |
+
"scr_dir1_threshold_100": 0.27878786565187064,
|
286 |
+
"scr_metric_threshold_100": -0.2222220014642874,
|
287 |
+
"scr_dir2_threshold_100": -0.2222220014642874,
|
288 |
+
"scr_dir1_threshold_500": 0.18787887326284092,
|
289 |
+
"scr_metric_threshold_500": -0.472222084248513,
|
290 |
+
"scr_dir2_threshold_500": -0.472222084248513
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_16",
|
296 |
+
"sae_lens_version": "5.3.0",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "gated",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_17_eval_results.json
ADDED
@@ -0,0 +1,323 @@
1 |
+
{
|
2 |
+
"eval_type_id": "scr",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"canrager/amazon_reviews_mcauley_1and5"
|
8 |
+
],
|
9 |
+
"perform_scr": true,
|
10 |
+
"early_stopping_patience": 20,
|
11 |
+
"train_set_size": 4000,
|
12 |
+
"test_set_size": 1000,
|
13 |
+
"context_length": 128,
|
14 |
+
"probe_train_batch_size": 16,
|
15 |
+
"probe_test_batch_size": 500,
|
16 |
+
"probe_epochs": 20,
|
17 |
+
"probe_lr": 0.001,
|
18 |
+
"probe_l1_penalty": 0.001,
|
19 |
+
"sae_batch_size": 125,
|
20 |
+
"llm_batch_size": 256,
|
21 |
+
"llm_dtype": "float32",
|
22 |
+
"lower_vram_usage": false,
|
23 |
+
"model_name": "pythia-160m-deduped",
|
24 |
+
"n_values": [
|
25 |
+
2,
|
26 |
+
5,
|
27 |
+
10,
|
28 |
+
20,
|
29 |
+
50,
|
30 |
+
100,
|
31 |
+
500
|
32 |
+
],
|
33 |
+
"column1_vals_lookup": {
|
34 |
+
"LabHC/bias_in_bios_class_set1": [
|
35 |
+
[
|
36 |
+
"professor",
|
37 |
+
"nurse"
|
38 |
+
],
|
39 |
+
[
|
40 |
+
"architect",
|
41 |
+
"journalist"
|
42 |
+
],
|
43 |
+
[
|
44 |
+
"surgeon",
|
45 |
+
"psychologist"
|
46 |
+
],
|
47 |
+
[
|
48 |
+
"attorney",
|
49 |
+
"teacher"
|
50 |
+
]
|
51 |
+
],
|
52 |
+
"canrager/amazon_reviews_mcauley_1and5": [
|
53 |
+
[
|
54 |
+
"Books",
|
55 |
+
"CDs_and_Vinyl"
|
56 |
+
],
|
57 |
+
[
|
58 |
+
"Software",
|
59 |
+
"Electronics"
|
60 |
+
],
|
61 |
+
[
|
62 |
+
"Pet_Supplies",
|
63 |
+
"Office_Products"
|
64 |
+
],
|
65 |
+
[
|
66 |
+
"Industrial_and_Scientific",
|
67 |
+
"Toys_and_Games"
|
68 |
+
]
|
69 |
+
]
|
70 |
+
}
|
71 |
+
},
|
72 |
+
"eval_id": "b394802f-e7f3-46c8-a044-b0ebfe990469",
|
73 |
+
"datetime_epoch_millis": 1736484107911,
|
74 |
+
"eval_result_metrics": {
|
75 |
+
"scr_metrics": {
|
76 |
+
"scr_dir1_threshold_2": 0.28874663540323486,
|
77 |
+
"scr_metric_threshold_2": 0.11187606433049901,
|
78 |
+
"scr_dir2_threshold_2": 0.11187606433049901,
|
79 |
+
"scr_dir1_threshold_5": 0.18044327040419328,
|
80 |
+
"scr_metric_threshold_5": 0.17411905065837205,
|
81 |
+
"scr_dir2_threshold_5": 0.17411905065837205,
|
82 |
+
"scr_dir1_threshold_10": 0.22893282656392694,
|
83 |
+
"scr_metric_threshold_10": 0.18621413110725155,
|
84 |
+
"scr_dir2_threshold_10": 0.18621413110725155,
|
85 |
+
"scr_dir1_threshold_20": 0.22376201410679103,
|
86 |
+
"scr_metric_threshold_20": 0.26678207641472246,
|
87 |
+
"scr_dir2_threshold_20": 0.26678207641472246,
|
88 |
+
"scr_dir1_threshold_50": 0.19033698030907797,
|
89 |
+
"scr_metric_threshold_50": 0.29147901176702884,
|
90 |
+
"scr_dir2_threshold_50": 0.29147901176702884,
|
91 |
+
"scr_dir1_threshold_100": -0.19264996393058503,
|
92 |
+
"scr_metric_threshold_100": 0.27617109397761797,
|
93 |
+
"scr_dir2_threshold_100": 0.27617109397761797,
|
94 |
+
"scr_dir1_threshold_500": -0.5332032119309518,
|
95 |
+
"scr_metric_threshold_500": 0.04724825260492346,
|
96 |
+
"scr_dir2_threshold_500": 0.04724825260492346
|
97 |
+
}
|
98 |
+
},
|
99 |
+
"eval_result_details": [
|
100 |
+
{
|
101 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
|
102 |
+
"scr_dir1_threshold_2": 0.5529415229611013,
|
103 |
+
"scr_metric_threshold_2": 0.022535087825340604,
|
104 |
+
"scr_dir2_threshold_2": 0.022535087825340604,
|
105 |
+
"scr_dir1_threshold_5": 0.5882351703710352,
|
106 |
+
"scr_metric_threshold_5": 0.030985850697592576,
|
107 |
+
"scr_dir2_threshold_5": 0.030985850697592576,
|
108 |
+
"scr_dir1_threshold_10": 0.5999997195076798,
|
109 |
+
"scr_metric_threshold_10": 0.07605636214907138,
|
110 |
+
"scr_dir2_threshold_10": 0.07605636214907138,
|
111 |
+
"scr_dir1_threshold_20": 0.2705881362968282,
|
112 |
+
"scr_metric_threshold_20": 0.16901408214224792,
|
113 |
+
"scr_dir2_threshold_20": 0.16901408214224792,
|
114 |
+
"scr_dir1_threshold_50": 0.5058826251837225,
|
115 |
+
"scr_metric_threshold_50": 0.10704221284666396,
|
116 |
+
"scr_dir2_threshold_50": 0.10704221284666396,
|
117 |
+
"scr_dir1_threshold_100": 0.45882372740634364,
|
118 |
+
"scr_metric_threshold_100": -0.06197186929558395,
|
119 |
+
"scr_dir2_threshold_100": -0.06197186929558395,
|
120 |
+
"scr_dir1_threshold_500": 0.317647034074207,
|
121 |
+
"scr_metric_threshold_500": 0.1577464542793782,
|
122 |
+
"scr_dir2_threshold_500": 0.1577464542793782
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
|
126 |
+
"scr_dir1_threshold_2": 0.6639342980819026,
|
127 |
+
"scr_metric_threshold_2": 0.08945685319110486,
|
128 |
+
"scr_dir2_threshold_2": 0.08945685319110486,
|
129 |
+
"scr_dir1_threshold_5": 0.6475410636988108,
|
130 |
+
"scr_metric_threshold_5": 0.23642171063794296,
|
131 |
+
"scr_dir2_threshold_5": 0.23642171063794296,
|
132 |
+
"scr_dir1_threshold_10": 0.631147829315719,
|
133 |
+
"scr_metric_threshold_10": 0.30031959893884697,
|
134 |
+
"scr_dir2_threshold_10": 0.30031959893884697,
|
135 |
+
"scr_dir1_threshold_20": 0.6721314038361946,
|
136 |
+
"scr_metric_threshold_20": 0.44089457234051427,
|
137 |
+
"scr_dir2_threshold_20": 0.44089457234051427,
|
138 |
+
"scr_dir1_threshold_50": 0.631147829315719,
|
139 |
+
"scr_metric_threshold_50": 0.5718850053196666,
|
140 |
+
"scr_dir2_threshold_50": 0.5718850053196666,
|
141 |
+
"scr_dir1_threshold_100": 0.614754106369881,
|
142 |
+
"scr_metric_threshold_100": 0.6293930095753998,
|
143 |
+
"scr_dir2_threshold_100": 0.6293930095753998,
|
144 |
+
"scr_dir1_threshold_500": -1.2950821273976216,
|
145 |
+
"scr_metric_threshold_500": -0.3258785638290478,
|
146 |
+
"scr_dir2_threshold_500": -0.3258785638290478
|
147 |
+
},
|
148 |
+
{
|
149 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
|
150 |
+
"scr_dir1_threshold_2": 0.413793197946312,
|
151 |
+
"scr_metric_threshold_2": 0.05847947508874469,
|
152 |
+
"scr_dir2_threshold_2": 0.05847947508874469,
|
153 |
+
"scr_dir1_threshold_5": 0.3793108881912939,
|
154 |
+
"scr_metric_threshold_5": 0.10233925568788639,
|
155 |
+
"scr_dir2_threshold_5": 0.10233925568788639,
|
156 |
+
"scr_dir1_threshold_10": 0.3793108881912939,
|
157 |
+
"scr_metric_threshold_10": 0.16081873077663109,
|
158 |
+
"scr_dir2_threshold_10": 0.16081873077663109,
|
159 |
+
"scr_dir1_threshold_20": 0.40229909469463926,
|
160 |
+
"scr_metric_threshold_20": 0.21345032806953457,
|
161 |
+
"scr_dir2_threshold_20": 0.21345032806953457,
|
162 |
+
"scr_dir1_threshold_50": 0.40229909469463926,
|
163 |
+
"scr_metric_threshold_50": 0.15497067869820672,
|
164 |
+
"scr_dir2_threshold_50": 0.15497067869820672,
|
165 |
+
"scr_dir1_threshold_100": -2.2643671152315195,
|
166 |
+
"scr_metric_threshold_100": 0.26900586426035866,
|
167 |
+
"scr_dir2_threshold_100": 0.26900586426035866,
|
168 |
+
"scr_dir1_threshold_500": -2.8390798140335347,
|
169 |
+
"scr_metric_threshold_500": -0.27192980315827925,
|
170 |
+
"scr_dir2_threshold_500": -0.27192980315827925
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": 0.2999999254941785,
|
175 |
+
"scr_metric_threshold_2": 0.10701122106810633,
|
176 |
+
"scr_dir2_threshold_2": 0.10701122106810633,
|
177 |
+
"scr_dir1_threshold_5": 0.29375000465661383,
|
178 |
+
"scr_metric_threshold_5": 0.13653135313966885,
|
179 |
+
"scr_dir2_threshold_5": 0.13653135313966885,
|
180 |
+
"scr_dir1_threshold_10": 0.3874999348074062,
|
181 |
+
"scr_metric_threshold_10": 0.2324723872163666,
|
182 |
+
"scr_dir2_threshold_10": 0.2324723872163666,
|
183 |
+
"scr_dir1_threshold_20": 0.4312501257285738,
|
184 |
+
"scr_metric_threshold_20": 0.2546125412558675,
|
185 |
+
"scr_dir2_threshold_20": 0.2546125412558675,
|
186 |
+
"scr_dir1_threshold_50": 0.062499953433861555,
|
187 |
+
"scr_metric_threshold_50": 0.3985240923709142,
|
188 |
+
"scr_dir2_threshold_50": 0.3985240923709142,
|
189 |
+
"scr_dir1_threshold_100": 0.03749989755449542,
|
190 |
+
"scr_metric_threshold_100": 0.5571955995500839,
|
191 |
+
"scr_dir2_threshold_100": 0.5571955995500839,
|
192 |
+
"scr_dir1_threshold_500": 0.36249987892804003,
|
193 |
+
"scr_metric_threshold_500": 0.5904059405809935,
|
194 |
+
"scr_dir2_threshold_500": 0.5904059405809935
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": 0.03355700402351222,
|
199 |
+
"scr_metric_threshold_2": 0.3791470774094751,
|
200 |
+
"scr_dir2_threshold_2": 0.3791470774094751,
|
201 |
+
"scr_dir1_threshold_5": -0.9798652375424333,
|
202 |
+
"scr_metric_threshold_5": 0.45497632339950955,
|
203 |
+
"scr_dir2_threshold_5": 0.45497632339950955,
|
204 |
+
"scr_dir1_threshold_10": -0.9127512294954089,
|
205 |
+
"scr_metric_threshold_10": 0.3507108629875822,
|
206 |
+
"scr_dir2_threshold_10": 0.3507108629875822,
|
207 |
+
"scr_dir1_threshold_20": -0.7651001717733296,
|
208 |
+
"scr_metric_threshold_20": 0.469194430610456,
|
209 |
+
"scr_dir2_threshold_20": 0.469194430610456,
|
210 |
+
"scr_dir1_threshold_50": -0.7919458549983479,
|
211 |
+
"scr_metric_threshold_50": 0.4170615591612752,
|
212 |
+
"scr_dir2_threshold_50": 0.4170615591612752,
|
213 |
+
"scr_dir1_threshold_100": -1.2147646657380613,
|
214 |
+
"scr_metric_threshold_100": 0.36492897019852866,
|
215 |
+
"scr_dir2_threshold_100": 0.36492897019852866,
|
216 |
+
"scr_dir1_threshold_500": -1.4496640939336893,
|
217 |
+
"scr_metric_threshold_500": 0.137440949741991,
|
218 |
+
"scr_dir2_threshold_500": 0.137440949741991
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.23188407675004405,
|
223 |
+
"scr_metric_threshold_2": 0.10377364856316265,
|
224 |
+
"scr_dir2_threshold_2": 0.10377364856316265,
|
225 |
+
"scr_dir1_threshold_5": 0.2898547719992951,
|
226 |
+
"scr_metric_threshold_5": 0.1556603322677598,
|
227 |
+
"scr_dir2_threshold_5": 0.1556603322677598,
|
228 |
+
"scr_dir1_threshold_10": 0.39130446050026435,
|
229 |
+
"scr_metric_threshold_10": 0.18867905431051207,
|
230 |
+
"scr_dir2_threshold_10": 0.18867905431051207,
|
231 |
+
"scr_dir1_threshold_20": 0.21739107899947133,
|
232 |
+
"scr_metric_threshold_20": 0.3113206645355196,
|
233 |
+
"scr_dir2_threshold_20": 0.3113206645355196,
|
234 |
+
"scr_dir1_threshold_50": 0.23913035966649043,
|
235 |
+
"scr_metric_threshold_50": 0.45283023642237197,
|
236 |
+
"scr_dir2_threshold_50": 0.45283023642237197,
|
237 |
+
"scr_dir1_threshold_100": 0.20289851316657856,
|
238 |
+
"scr_metric_threshold_100": 0.3820753099019616,
|
239 |
+
"scr_dir2_threshold_100": 0.3820753099019616,
|
240 |
+
"scr_dir1_threshold_500": 0.12318853725030839,
|
241 |
+
"scr_metric_threshold_500": 0.3160375846624887,
|
242 |
+
"scr_dir2_threshold_500": 0.3160375846624887
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.12598447369816684,
|
247 |
+
"scr_metric_threshold_2": 0.04016060219359444,
|
248 |
+
"scr_dir2_threshold_2": 0.04016060219359444,
|
249 |
+
"scr_dir1_threshold_5": 0.21259844736981667,
|
250 |
+
"scr_metric_threshold_5": 0.12048204595685438,
|
251 |
+
"scr_dir2_threshold_5": 0.12048204595685438,
|
252 |
+
"scr_dir1_threshold_10": 0.33070853946600826,
|
253 |
+
"scr_metric_threshold_10": 0.10843376954834762,
|
254 |
+
"scr_dir2_threshold_10": 0.10843376954834762,
|
255 |
+
"scr_dir1_threshold_20": 0.46456692543836386,
|
256 |
+
"scr_metric_threshold_20": 0.16465866049459404,
|
257 |
+
"scr_dir2_threshold_20": 0.16465866049459404,
|
258 |
+
"scr_dir1_threshold_50": 0.42519689473963335,
|
259 |
+
"scr_metric_threshold_50": 0.21285152675255,
|
260 |
+
"scr_dir2_threshold_50": 0.21285152675255,
|
261 |
+
"scr_dir1_threshold_100": 0.4724408377125527,
|
262 |
+
"scr_metric_threshold_100": 0.2409638525376377,
|
263 |
+
"scr_dir2_threshold_100": 0.2409638525376377,
|
264 |
+
"scr_dir1_threshold_500": 0.4724408377125527,
|
265 |
+
"scr_metric_threshold_500": 0.09638549313984086,
|
266 |
+
"scr_dir2_threshold_500": 0.09638549313984086
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": -0.012121415729338595,
|
271 |
+
"scr_metric_threshold_2": 0.0944445493044635,
|
272 |
+
"scr_dir2_threshold_2": 0.0944445493044635,
|
273 |
+
"scr_dir1_threshold_5": 0.012121054489114207,
|
274 |
+
"scr_metric_threshold_5": 0.15555553347976206,
|
275 |
+
"scr_dir2_threshold_5": 0.15555553347976206,
|
276 |
+
"scr_dir1_threshold_10": 0.0242424702184528,
|
277 |
+
"scr_metric_threshold_10": 0.0722222829306543,
|
278 |
+
"scr_dir2_threshold_10": 0.0722222829306543,
|
279 |
+
"scr_dir1_threshold_20": 0.09696951963358681,
|
280 |
+
"scr_metric_threshold_20": 0.11111133186904595,
|
281 |
+
"scr_dir2_threshold_20": 0.11111133186904595,
|
282 |
+
"scr_dir1_threshold_50": 0.0484849404369056,
|
283 |
+
"scr_metric_threshold_50": 0.01666678256458246,
|
284 |
+
"scr_dir2_threshold_50": 0.01666678256458246,
|
285 |
+
"scr_dir1_threshold_100": 0.15151498731504953,
|
286 |
+
"scr_metric_threshold_100": -0.17222198490744228,
|
287 |
+
"scr_dir2_threshold_100": -0.17222198490744228,
|
288 |
+
"scr_dir1_threshold_500": 0.04242405195212411,
|
289 |
+
"scr_metric_threshold_500": -0.3222220345779776,
|
290 |
+
"scr_dir2_threshold_500": -0.3222220345779776
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_17",
|
296 |
+
"sae_lens_version": "5.3.0",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "gated",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_30_eval_results.json
ADDED
@@ -0,0 +1,323 @@
1 |
+
{
|
2 |
+
"eval_type_id": "scr",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"canrager/amazon_reviews_mcauley_1and5"
|
8 |
+
],
|
9 |
+
"perform_scr": true,
|
10 |
+
"early_stopping_patience": 20,
|
11 |
+
"train_set_size": 4000,
|
12 |
+
"test_set_size": 1000,
|
13 |
+
"context_length": 128,
|
14 |
+
"probe_train_batch_size": 16,
|
15 |
+
"probe_test_batch_size": 500,
|
16 |
+
"probe_epochs": 20,
|
17 |
+
"probe_lr": 0.001,
|
18 |
+
"probe_l1_penalty": 0.001,
|
19 |
+
"sae_batch_size": 125,
|
20 |
+
"llm_batch_size": 256,
|
21 |
+
"llm_dtype": "float32",
|
22 |
+
"lower_vram_usage": false,
|
23 |
+
"model_name": "pythia-160m-deduped",
|
24 |
+
"n_values": [
|
25 |
+
2,
|
26 |
+
5,
|
27 |
+
10,
|
28 |
+
20,
|
29 |
+
50,
|
30 |
+
100,
|
31 |
+
500
|
32 |
+
],
|
33 |
+
"column1_vals_lookup": {
|
34 |
+
"LabHC/bias_in_bios_class_set1": [
|
35 |
+
[
|
36 |
+
"professor",
|
37 |
+
"nurse"
|
38 |
+
],
|
39 |
+
[
|
40 |
+
"architect",
|
41 |
+
"journalist"
|
42 |
+
],
|
43 |
+
[
|
44 |
+
"surgeon",
|
45 |
+
"psychologist"
|
46 |
+
],
|
47 |
+
[
|
48 |
+
"attorney",
|
49 |
+
"teacher"
|
50 |
+
]
|
51 |
+
],
|
52 |
+
"canrager/amazon_reviews_mcauley_1and5": [
|
53 |
+
[
|
54 |
+
"Books",
|
55 |
+
"CDs_and_Vinyl"
|
56 |
+
],
|
57 |
+
[
|
58 |
+
"Software",
|
59 |
+
"Electronics"
|
60 |
+
],
|
61 |
+
[
|
62 |
+
"Pet_Supplies",
|
63 |
+
"Office_Products"
|
64 |
+
],
|
65 |
+
[
|
66 |
+
"Industrial_and_Scientific",
|
67 |
+
"Toys_and_Games"
|
68 |
+
]
|
69 |
+
]
|
70 |
+
}
|
71 |
+
},
|
72 |
+
"eval_id": "a362b856-f390-41b5-a2ad-493cf6aa14eb",
|
73 |
+
"datetime_epoch_millis": 1736484245442,
|
74 |
+
"eval_result_metrics": {
|
75 |
+
"scr_metrics": {
|
76 |
+
"scr_dir1_threshold_2": 0.23745356343392862,
|
77 |
+
"scr_metric_threshold_2": 0.1109310483982693,
|
78 |
+
"scr_dir2_threshold_2": 0.1109310483982693,
|
79 |
+
"scr_dir1_threshold_5": 0.11630654572818025,
|
80 |
+
"scr_metric_threshold_5": 0.1608556253584261,
|
81 |
+
"scr_dir2_threshold_5": 0.1608556253584261,
|
82 |
+
"scr_dir1_threshold_10": 0.07069298393879633,
|
83 |
+
"scr_metric_threshold_10": 0.20339125057127797,
|
84 |
+
"scr_dir2_threshold_10": 0.20339125057127797,
|
85 |
+
"scr_dir1_threshold_20": 0.0752571603905324,
|
86 |
+
"scr_metric_threshold_20": 0.2893386729574251,
|
87 |
+
"scr_dir2_threshold_20": 0.2893386729574251,
|
88 |
+
"scr_dir1_threshold_50": 0.08523280687564687,
|
89 |
+
"scr_metric_threshold_50": 0.27812884490207235,
|
90 |
+
"scr_dir2_threshold_50": 0.27812884490207235,
|
91 |
+
"scr_dir1_threshold_100": -0.15317155578774375,
|
92 |
+
"scr_metric_threshold_100": 0.20248745052138276,
|
93 |
+
"scr_dir2_threshold_100": 0.20248745052138276,
|
94 |
+
"scr_dir1_threshold_500": -0.3168210441679802,
|
95 |
+
"scr_metric_threshold_500": 0.04788414345245733,
|
96 |
+
"scr_dir2_threshold_500": 0.04788414345245733
|
97 |
+
}
|
98 |
+
},
|
99 |
+
"eval_result_details": [
|
100 |
+
{
|
101 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
|
102 |
+
"scr_dir1_threshold_2": 0.49411737481627754,
|
103 |
+
"scr_metric_threshold_2": 0.030985850697592576,
|
104 |
+
"scr_dir2_threshold_2": 0.030985850697592576,
|
105 |
+
"scr_dir1_threshold_5": 0.5411762725936564,
|
106 |
+
"scr_metric_threshold_5": 0.09577458498379425,
|
107 |
+
"scr_dir2_threshold_5": 0.09577458498379425,
|
108 |
+
"scr_dir1_threshold_10": 0.5882351703710352,
|
109 |
+
"scr_metric_threshold_10": 0.16056331926999592,
|
110 |
+
"scr_dir2_threshold_10": 0.16056331926999592,
|
111 |
+
"scr_dir1_threshold_20": 0.3882350301248752,
|
112 |
+
"scr_metric_threshold_20": 0.1577464542793782,
|
113 |
+
"scr_dir2_threshold_20": 0.1577464542793782,
|
114 |
+
"scr_dir1_threshold_50": 0.23529448888689428,
|
115 |
+
"scr_metric_threshold_50": 0.0,
|
116 |
+
"scr_dir2_threshold_50": 0.0,
|
117 |
+
"scr_dir1_threshold_100": -0.6705877155583481,
|
118 |
+
"scr_metric_threshold_100": -0.008450762872251974,
|
119 |
+
"scr_dir2_threshold_100": -0.008450762872251974,
|
120 |
+
"scr_dir1_threshold_500": 0.10588234469140236,
|
121 |
+
"scr_metric_threshold_500": 0.025352120716357125,
|
122 |
+
"scr_dir2_threshold_500": 0.025352120716357125
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
|
126 |
+
"scr_dir1_threshold_2": 0.5983608719867892,
|
127 |
+
"scr_metric_threshold_2": 0.13738031702432307,
|
128 |
+
"scr_dir2_threshold_2": 0.13738031702432307,
|
129 |
+
"scr_dir1_threshold_5": 0.6475410636988108,
|
130 |
+
"scr_metric_threshold_5": 0.26198086595830455,
|
131 |
+
"scr_dir2_threshold_5": 0.26198086595830455,
|
132 |
+
"scr_dir1_threshold_10": 0.614754106369881,
|
133 |
+
"scr_metric_threshold_10": 0.3354632946817236,
|
134 |
+
"scr_dir2_threshold_10": 0.3354632946817236,
|
135 |
+
"scr_dir1_threshold_20": 0.6721314038361946,
|
136 |
+
"scr_metric_threshold_20": 0.44728445638568504,
|
137 |
+
"scr_dir2_threshold_20": 0.44728445638568504,
|
138 |
+
"scr_dir1_threshold_50": 0.6803280210277406,
|
139 |
+
"scr_metric_threshold_50": 0.5718850053196666,
|
140 |
+
"scr_dir2_threshold_50": 0.5718850053196666,
|
141 |
+
"scr_dir1_threshold_100": 0.4836067656169082,
|
142 |
+
"scr_metric_threshold_100": -0.041533389357886634,
|
143 |
+
"scr_dir2_threshold_100": -0.041533389357886634,
|
144 |
+
"scr_dir1_threshold_500": -0.3114753617807135,
|
145 |
+
"scr_metric_threshold_500": -0.3482426819117436,
|
146 |
+
"scr_dir2_threshold_500": -0.3482426819117436
|
147 |
+
},
|
148 |
+
{
|
149 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
|
150 |
+
"scr_dir1_threshold_2": 0.40229909469463926,
|
151 |
+
"scr_metric_threshold_2": 0.04093566741863792,
|
152 |
+
"scr_dir2_threshold_2": 0.04093566741863792,
|
153 |
+
"scr_dir1_threshold_5": 0.4597702960637646,
|
154 |
+
"scr_metric_threshold_5": 0.08771921263311704,
|
155 |
+
"scr_dir2_threshold_5": 0.08771921263311704,
|
156 |
+
"scr_dir1_threshold_10": 0.43678208956041925,
|
157 |
+
"scr_metric_threshold_10": 0.10526319458580699,
|
158 |
+
"scr_dir2_threshold_10": 0.10526319458580699,
|
159 |
+
"scr_dir1_threshold_20": 0.3678160998288593,
|
160 |
+
"scr_metric_threshold_20": 0.201754398195269,
|
161 |
+
"scr_dir2_threshold_20": 0.201754398195269,
|
162 |
+
"scr_dir1_threshold_50": 0.49425329092954456,
|
163 |
+
"scr_metric_threshold_50": 0.2602338732840137,
|
164 |
+
"scr_dir2_threshold_50": 0.2602338732840137,
|
165 |
+
"scr_dir1_threshold_100": -0.3103442133489721,
|
166 |
+
"scr_metric_threshold_100": 0.289473610828386,
|
167 |
+
"scr_dir2_threshold_100": 0.289473610828386,
|
168 |
+
"scr_dir1_threshold_500": -0.8160916075301894,
|
169 |
+
"scr_metric_threshold_500": -0.14327492310652432,
|
170 |
+
"scr_dir2_threshold_500": -0.14327492310652432
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": 0.14999996274708924,
|
175 |
+
"scr_metric_threshold_2": 0.09225104506066696,
|
176 |
+
"scr_dir2_threshold_2": 0.09225104506066696,
|
177 |
+
"scr_dir1_threshold_5": 0.24374989289788157,
|
178 |
+
"scr_metric_threshold_5": 0.1143911991001679,
|
179 |
+
"scr_dir2_threshold_5": 0.1143911991001679,
|
180 |
+
"scr_dir1_threshold_10": -0.3750000931322769,
|
181 |
+
"scr_metric_threshold_10": 0.18450187017801772,
|
182 |
+
"scr_dir2_threshold_10": 0.18450187017801772,
|
183 |
+
"scr_dir1_threshold_20": -0.27500024214391994,
|
184 |
+
"scr_metric_threshold_20": 0.2767526952953685,
|
185 |
+
"scr_dir2_threshold_20": 0.2767526952953685,
|
186 |
+
"scr_dir1_threshold_50": 0.0,
|
187 |
+
"scr_metric_threshold_50": 0.3357933993251259,
|
188 |
+
"scr_dir2_threshold_50": 0.3357933993251259,
|
189 |
+
"scr_dir1_threshold_100": 0.0,
|
190 |
+
"scr_metric_threshold_100": 0.4907749174882649,
|
191 |
+
"scr_dir2_threshold_100": 0.4907749174882649,
|
192 |
+
"scr_dir1_threshold_500": -0.18125031199312758,
|
193 |
+
"scr_metric_threshold_500": 0.6199262925958722,
|
194 |
+
"scr_dir2_threshold_500": 0.6199262925958722
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": 0.11409405369856701,
|
199 |
+
"scr_metric_threshold_2": 0.2606635097866013,
|
200 |
+
"scr_dir2_threshold_2": 0.2606635097866013,
|
201 |
+
"scr_dir1_threshold_5": -1.2416103489630794,
|
202 |
+
"scr_metric_threshold_5": 0.32227493105212357,
|
203 |
+
"scr_dir2_threshold_5": 0.32227493105212357,
|
204 |
+
"scr_dir1_threshold_10": -1.2348990281645855,
|
205 |
+
"scr_metric_threshold_10": 0.3744075200148701,
|
206 |
+
"scr_dir2_threshold_10": 0.3744075200148701,
|
207 |
+
"scr_dir1_threshold_20": -1.2013420241410733,
|
208 |
+
"scr_metric_threshold_20": 0.5213270195732026,
|
209 |
+
"scr_dir2_threshold_20": 0.5213270195732026,
|
210 |
+
"scr_dir1_threshold_50": -1.2617447113896039,
|
211 |
+
"scr_metric_threshold_50": 0.3080568238411771,
|
212 |
+
"scr_dir2_threshold_50": 0.3080568238411771,
|
213 |
+
"scr_dir1_threshold_100": -1.1677846200865187,
|
214 |
+
"scr_metric_threshold_100": 0.16113760676927893,
|
215 |
+
"scr_dir2_threshold_100": 0.16113760676927893,
|
216 |
+
"scr_dir1_threshold_500": -0.8791942254718966,
|
217 |
+
"scr_metric_threshold_500": -0.1753554314937911,
|
218 |
+
"scr_dir2_threshold_500": -0.1753554314937911
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.11594182241618206,
|
223 |
+
"scr_metric_threshold_2": 0.08962260702828691,
|
224 |
+
"scr_dir2_threshold_2": 0.08962260702828691,
|
225 |
+
"scr_dir1_threshold_5": 0.20289851316657856,
|
226 |
+
"scr_metric_threshold_5": 0.2028300958453878,
|
227 |
+
"scr_dir2_threshold_5": 0.2028300958453878,
|
228 |
+
"scr_dir1_threshold_10": 0.3333333333333333,
|
229 |
+
"scr_metric_threshold_10": 0.25943398083092245,
|
230 |
+
"scr_dir2_threshold_10": 0.25943398083092245,
|
231 |
+
"scr_dir1_threshold_20": 0.3188403355827606,
|
232 |
+
"scr_metric_threshold_20": 0.32075450478945783,
|
233 |
+
"scr_dir2_threshold_20": 0.32075450478945783,
|
234 |
+
"scr_dir1_threshold_50": 0.17391294958311307,
|
235 |
+
"scr_metric_threshold_50": 0.41981123322565134,
|
236 |
+
"scr_dir2_threshold_50": 0.41981123322565134,
|
237 |
+
"scr_dir1_threshold_100": 0.014492565832892778,
|
238 |
+
"scr_metric_threshold_100": 0.45754715654934114,
|
239 |
+
"scr_dir2_threshold_100": 0.45754715654934114,
|
240 |
+
"scr_dir1_threshold_500": -0.10144925658328928,
|
241 |
+
"scr_metric_threshold_500": 0.3773583897749925,
|
242 |
+
"scr_dir2_threshold_500": 0.3773583897749925
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.05511832457489462,
|
247 |
+
"scr_metric_threshold_2": 0.12449805830099961,
|
248 |
+
"scr_dir2_threshold_2": 0.12449805830099961,
|
249 |
+
"scr_dir1_threshold_5": 0.07086614912327223,
|
250 |
+
"scr_metric_threshold_5": 0.024096313440942454,
|
251 |
+
"scr_dir2_threshold_5": 0.024096313440942454,
|
252 |
+
"scr_dir1_threshold_10": 0.14173229824654446,
|
253 |
+
"scr_metric_threshold_10": 0.09638549313984086,
|
254 |
+
"scr_dir2_threshold_10": 0.09638549313984086,
|
255 |
+
"scr_dir1_threshold_20": 0.2283462719181943,
|
256 |
+
"scr_metric_threshold_20": 0.21686753909669523,
|
257 |
+
"scr_dir2_threshold_20": 0.21686753909669523,
|
258 |
+
"scr_dir1_threshold_50": 0.2992124210414665,
|
259 |
+
"scr_metric_threshold_50": 0.25702814129028967,
|
260 |
+
"scr_dir2_threshold_50": 0.25702814129028967,
|
261 |
+
"scr_dir1_threshold_100": 0.34645683334217237,
|
262 |
+
"scr_metric_threshold_100": 0.2931727311397389,
|
263 |
+
"scr_dir2_threshold_100": 0.2931727311397389,
|
264 |
+
"scr_dir1_threshold_500": 0.13385838597235566,
|
265 |
+
"scr_metric_threshold_500": 0.16064264815044882,
|
266 |
+
"scr_dir2_threshold_500": 0.16064264815044882
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": -0.030302997463009903,
|
271 |
+
"scr_metric_threshold_2": 0.11111133186904595,
|
272 |
+
"scr_dir2_threshold_2": 0.11111133186904595,
|
273 |
+
"scr_dir1_threshold_5": 0.006060527244557103,
|
274 |
+
"scr_metric_threshold_5": 0.17777779985357126,
|
275 |
+
"scr_dir2_threshold_5": 0.17777779985357126,
|
276 |
+
"scr_dir1_threshold_10": 0.060605994926019806,
|
277 |
+
"scr_metric_threshold_10": 0.11111133186904595,
|
278 |
+
"scr_dir2_threshold_10": 0.11111133186904595,
|
279 |
+
"scr_dir1_threshold_20": 0.1030304081183683,
|
280 |
+
"scr_metric_threshold_20": 0.17222231604434454,
|
281 |
+
"scr_dir2_threshold_20": 0.17222231604434454,
|
282 |
+
"scr_dir1_threshold_50": 0.060605994926019806,
|
283 |
+
"scr_metric_threshold_50": 0.0722222829306543,
|
284 |
+
"scr_dir2_threshold_50": 0.0722222829306543,
|
285 |
+
"scr_dir1_threshold_100": 0.0787879378999155,
|
286 |
+
"scr_metric_threshold_100": -0.02222226637380919,
|
287 |
+
"scr_dir2_threshold_100": -0.02222226637380919,
|
288 |
+
"scr_dir1_threshold_500": -0.48484832064838285,
|
289 |
+
"scr_metric_threshold_500": -0.1333332671059529,
|
290 |
+
"scr_dir2_threshold_500": -0.1333332671059529
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_30",
|
296 |
+
"sae_lens_version": "5.3.0",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "jumprelu",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_31_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 125,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": false,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [
        ["professor", "nurse"],
        ["architect", "journalist"],
        ["surgeon", "psychologist"],
        ["attorney", "teacher"]
      ],
      "canrager/amazon_reviews_mcauley_1and5": [
        ["Books", "CDs_and_Vinyl"],
        ["Software", "Electronics"],
        ["Pet_Supplies", "Office_Products"],
        ["Industrial_and_Scientific", "Toys_and_Games"]
      ]
    }
  },
  "eval_id": "f1dde34c-3b46-4ef4-ae92-2a9c7719789c",
  "datetime_epoch_millis": 1736484383218,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.3401970668294661, "scr_metric_threshold_2": 0.1277749459525215, "scr_dir2_threshold_2": 0.1277749459525215,
      "scr_dir1_threshold_5": 0.398536158341988, "scr_metric_threshold_5": 0.19184160236985084, "scr_dir2_threshold_5": 0.19184160236985084,
      "scr_dir1_threshold_10": 0.2998884928353217, "scr_metric_threshold_10": 0.22702202925943843, "scr_dir2_threshold_10": 0.22702202925943843,
      "scr_dir1_threshold_20": 0.35031969165764465, "scr_metric_threshold_20": 0.28937918968066567, "scr_dir2_threshold_20": 0.28937918968066567,
      "scr_dir1_threshold_50": 0.382583950531791, "scr_metric_threshold_50": 0.28208852715074373, "scr_dir2_threshold_50": 0.28208852715074373,
      "scr_dir1_threshold_100": 0.16364579959227005, "scr_metric_threshold_100": 0.2877178589918261, "scr_dir2_threshold_100": 0.2877178589918261,
      "scr_dir1_threshold_500": -0.3765676934415599, "scr_metric_threshold_500": -0.03067586114080545, "scr_dir2_threshold_500": -0.03067586114080545
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5882351703710352, "scr_metric_threshold_2": 0.005633729981235451, "scr_dir2_threshold_2": 0.005633729981235451,
      "scr_dir1_threshold_5": 0.635294068148414, "scr_metric_threshold_5": 0.06478873428620167, "scr_dir2_threshold_5": 0.06478873428620167,
      "scr_dir1_threshold_10": 0.47058827654298824, "scr_metric_threshold_10": 0.1014084828654285, "scr_dir2_threshold_10": 0.1014084828654285,
      "scr_dir1_threshold_20": 0.447059178269699, "scr_metric_threshold_20": 0.2028167978304582, "scr_dir2_threshold_20": 0.2028167978304582,
      "scr_dir1_threshold_50": 0.6588231664217034, "scr_metric_threshold_50": 0.008450594971853177, "scr_dir2_threshold_50": 0.008450594971853177,
      "scr_dir1_threshold_100": 0.14117669333213656, "scr_metric_threshold_100": 0.07323932925805485, "scr_dir2_threshold_100": 0.07323932925805485,
      "scr_dir1_threshold_500": -0.04705889777737886, "scr_metric_threshold_500": -0.01690152574450395, "scr_dir2_threshold_500": -0.01690152574450395
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.7704917872602378, "scr_metric_threshold_2": 0.1533547414920089, "scr_dir2_threshold_2": 0.1533547414920089,
      "scr_dir1_threshold_5": 0.7786888930145298, "scr_metric_threshold_5": 0.22364232340792292, "scr_dir2_threshold_5": 0.22364232340792292,
      "scr_dir1_threshold_10": 0.7786888930145298, "scr_metric_threshold_10": 0.30031959893884697, "scr_dir2_threshold_10": 0.30031959893884697,
      "scr_dir1_threshold_20": 0.7704917872602378, "scr_metric_threshold_20": 0.42172530106532347, "scr_dir2_threshold_20": 0.42172530106532347,
      "scr_dir1_threshold_50": 0.8032787445891676, "scr_metric_threshold_50": 0.5878594297873524, "scr_dir2_threshold_50": 0.5878594297873524,
      "scr_dir1_threshold_100": 0.8442623191096432, "scr_metric_threshold_100": 0.6581470117032664, "scr_dir2_threshold_100": 0.6581470117032664,
      "scr_dir1_threshold_500": -0.03278695732892976, "scr_metric_threshold_500": -0.3706069904246002, "scr_dir2_threshold_500": -0.3706069904246002
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5517244922986699, "scr_metric_threshold_2": 0.07017540496301027, "scr_dir2_threshold_2": 0.07017540496301027,
      "scr_dir1_threshold_5": 0.5747126988020154, "scr_metric_threshold_5": 0.12280700225591376, "scr_dir2_threshold_5": 0.12280700225591376,
      "scr_dir1_threshold_10": 0.4597702960637646, "scr_metric_threshold_10": 0.14035080992602053, "scr_dir2_threshold_10": 0.14035080992602053,
      "scr_dir1_threshold_20": 0.4482761928120919, "scr_metric_threshold_20": 0.22222214476329635, "scr_dir2_threshold_20": 0.22222214476329635,
      "scr_dir1_threshold_50": 0.5287356006845627, "scr_metric_threshold_50": 0.3333333914275277, "scr_dir2_threshold_50": 0.3333333914275277,
      "scr_dir1_threshold_100": 0.34482789332551395, "scr_metric_threshold_100": 0.41520472626480354, "scr_dir2_threshold_100": 0.41520472626480354,
      "scr_dir1_threshold_500": -0.32183900171140667, "scr_metric_threshold_500": 0.16666660857247229, "scr_dir2_threshold_500": 0.16666660857247229
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.3874999348074062, "scr_metric_threshold_2": 0.13284136412363806, "scr_dir2_threshold_2": 0.13284136412363806,
      "scr_dir1_threshold_5": 0.32499998137354463, "scr_metric_threshold_5": 0.17712189214595614, "scr_dir2_threshold_5": 0.17712189214595614,
      "scr_dir1_threshold_10": 0.28124979045237697, "scr_metric_threshold_10": 0.18450187017801772, "scr_dir2_threshold_10": 0.18450187017801772,
      "scr_dir1_threshold_20": 0.33749982304867393, "scr_metric_threshold_20": 0.29520308026215486, "scr_dir2_threshold_20": 0.29520308026215486,
      "scr_dir1_threshold_50": 0.09374993015079233, "scr_metric_threshold_50": 0.16605170515454756, "scr_dir2_threshold_50": 0.16605170515454756,
      "scr_dir1_threshold_100": 0.20000007450582152, "scr_metric_threshold_100": 0.1143911991001679, "scr_dir2_threshold_100": 0.1143911991001679,
      "scr_dir1_threshold_500": -0.7437502654269892, "scr_metric_threshold_500": -0.46863454350544775, "scr_dir2_threshold_500": -0.46863454350544775
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.10067101207053668, "scr_metric_threshold_2": 0.36492897019852866, "scr_dir2_threshold_2": 0.36492897019852866,
      "scr_dir1_threshold_5": 0.10738273290007305, "scr_metric_threshold_5": 0.331753480868465, "scr_dir2_threshold_5": 0.331753480868465,
      "scr_dir1_threshold_10": -0.657717838904299, "scr_metric_threshold_10": 0.31279638123578213, "scr_dir2_threshold_10": 0.31279638123578213,
      "scr_dir1_threshold_20": -0.422818410708671, "scr_metric_threshold_20": 0.3791470774094751, "scr_dir2_threshold_20": 0.3791470774094751,
      "scr_dir1_threshold_50": -0.2885903946146221, "scr_metric_threshold_50": 0.137440949741991, "scr_dir2_threshold_50": 0.137440949741991,
      "scr_dir1_threshold_100": -1.1006706120394942, "scr_metric_threshold_100": 0.15639804937467391, "scr_dir2_threshold_100": 0.15639804937467391,
      "scr_dir1_threshold_500": -2.0335562039614272, "scr_metric_threshold_500": -0.018957382119117183, "scr_dir2_threshold_500": -0.018957382119117183
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.16666666666666666, "scr_metric_threshold_2": 0.12264132907103917, "scr_dir2_threshold_2": 0.12264132907103917,
      "scr_dir1_threshold_5": 0.2898547719992951, "scr_metric_threshold_5": 0.26415090095789157, "scr_dir2_threshold_5": 0.26415090095789157,
      "scr_dir1_threshold_10": 0.4710144364165345, "scr_metric_threshold_10": 0.35377350798617846, "scr_dir2_threshold_10": 0.35377350798617846,
      "scr_dir1_threshold_20": 0.5217388487493392, "scr_metric_threshold_20": 0.3915094313098682, "scr_dir2_threshold_20": 0.3915094313098682,
      "scr_dir1_threshold_50": 0.4637681535000881, "scr_metric_threshold_50": 0.47169791693024854, "scr_dir2_threshold_50": 0.47169791693024854,
      "scr_dir1_threshold_100": 0.0289855635834655, "scr_metric_threshold_100": 0.6037733674091943, "scr_dir2_threshold_100": 0.6037733674091943,
      "scr_dir1_threshold_500": -0.1884059473336858, "scr_metric_threshold_500": 0.3962263514368374, "scr_dir2_threshold_500": 0.3962263514368374
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.12598447369816684, "scr_metric_threshold_2": 0.10040174486005715, "scr_dir2_threshold_2": 0.10040174486005715,
      "scr_dir1_threshold_5": 0.36220465789054995, "scr_metric_threshold_5": 0.17269068518288452, "scr_dir2_threshold_5": 0.17269068518288452,
      "scr_dir1_threshold_10": 0.40157468858928047, "scr_metric_threshold_10": 0.20080325034404325, "scr_dir2_threshold_10": 0.20080325034404325,
      "scr_dir1_threshold_20": 0.4881891315887168, "scr_metric_threshold_20": 0.2690764176987964, "scr_dir2_threshold_20": 0.2690764176987964,
      "scr_dir1_threshold_50": 0.5039369561370944, "scr_metric_threshold_50": 0.3574296467742758, "scr_dir2_threshold_50": 0.3574296467742758,
      "scr_dir1_threshold_100": 0.5354330745616361, "scr_metric_threshold_100": 0.36947792318278255, "scr_dir2_threshold_100": 0.36947792318278255,
      "scr_dir1_threshold_500": 0.4881891315887168, "scr_metric_threshold_500": 0.14457835939779684, "scr_dir2_threshold_500": 0.14457835939779684
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.030302997463009903, "scr_metric_threshold_2": 0.0722222829306543, "scr_dir2_threshold_2": 0.0722222829306543,
      "scr_dir1_threshold_5": 0.11515146260748252, "scr_metric_threshold_5": 0.17777779985357126, "scr_dir2_threshold_5": 0.17777779985357126,
      "scr_dir1_threshold_10": 0.19393940050739802, "scr_metric_threshold_10": 0.22222233260118965, "scr_dir2_threshold_10": 0.22222233260118965,
      "scr_dir1_threshold_20": 0.21212098224106932, "scr_metric_threshold_20": 0.1333332671059529, "scr_dir2_threshold_20": 0.1333332671059529,
      "scr_dir1_threshold_50": 0.29696944738554193, "scr_metric_threshold_50": 0.1944445824181537, "scr_dir2_threshold_50": 0.1944445824181537,
      "scr_dir1_threshold_100": 0.3151513903594376, "scr_metric_threshold_100": -0.0888887343583345, "scr_dir2_threshold_100": -0.0888887343583345,
      "scr_dir1_threshold_500": -0.1333334055813782, "scr_metric_threshold_500": -0.07777776673988103, "scr_dir2_threshold_500": -0.07777776673988103
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_31",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_32_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 125,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": false,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [
        ["professor", "nurse"],
        ["architect", "journalist"],
        ["surgeon", "psychologist"],
        ["attorney", "teacher"]
      ],
      "canrager/amazon_reviews_mcauley_1and5": [
        ["Books", "CDs_and_Vinyl"],
        ["Software", "Electronics"],
        ["Pet_Supplies", "Office_Products"],
        ["Industrial_and_Scientific", "Toys_and_Games"]
      ]
    }
  },
  "eval_id": "8e4200bd-1def-4d8a-9c7c-e563173ed02b",
  "datetime_epoch_millis": 1736484522999,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.3501525101140079, "scr_metric_threshold_2": 0.12723577032713196, "scr_dir2_threshold_2": 0.12723577032713196,
      "scr_dir1_threshold_5": 0.43948386099404796, "scr_metric_threshold_5": 0.207046581750635, "scr_dir2_threshold_5": 0.207046581750635,
      "scr_dir1_threshold_10": 0.40886092742352076, "scr_metric_threshold_10": 0.22763895745375273, "scr_dir2_threshold_10": 0.22763895745375273,
      "scr_dir1_threshold_20": 0.4552556771061577, "scr_metric_threshold_20": 0.29241968020031833, "scr_dir2_threshold_20": 0.29241968020031833,
      "scr_dir1_threshold_50": 0.419013286564724, "scr_metric_threshold_50": 0.31166868206643855, "scr_dir2_threshold_50": 0.31166868206643855,
      "scr_dir1_threshold_100": 0.3014584115093985, "scr_metric_threshold_100": 0.2773480250280143, "scr_dir2_threshold_100": 0.2773480250280143,
      "scr_dir1_threshold_500": -0.12908805538796575, "scr_metric_threshold_500": 0.01758353694935952, "scr_dir2_threshold_500": 0.01758353694935952
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5999997195076798, "scr_metric_threshold_2": 0.025352120716357125, "scr_dir2_threshold_2": 0.025352120716357125,
      "scr_dir1_threshold_5": 0.635294068148414, "scr_metric_threshold_5": 0.04788720854169773, "scr_dir2_threshold_5": 0.04788720854169773,
      "scr_dir1_threshold_10": 0.6941175150624376, "scr_metric_threshold_10": 0.09295771999317652, "scr_dir2_threshold_10": 0.09295771999317652,
      "scr_dir1_threshold_20": 0.41176482962896477, "scr_metric_threshold_20": 0.17183094713286562, "scr_dir2_threshold_20": 0.17183094713286562,
      "scr_dir1_threshold_50": 0.6117649698751249, "scr_metric_threshold_50": 0.02816898570697485, "scr_dir2_threshold_50": 0.02816898570697485,
      "scr_dir1_threshold_100": 0.6117649698751249, "scr_metric_threshold_100": -0.0112676278628697, "scr_dir2_threshold_100": -0.0112676278628697,
      "scr_dir1_threshold_500": -0.7529409619764611, "scr_metric_threshold_500": -0.14084509643527304, "scr_dir2_threshold_500": -0.14084509643527304
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.8114753617807134, "scr_metric_threshold_2": 0.13418527978665729, "scr_dir2_threshold_2": 0.13418527978665729,
      "scr_dir1_threshold_5": 0.8360657019180974, "scr_metric_threshold_5": 0.25239632553578956, "scr_dir2_threshold_5": 0.25239632553578956,
      "scr_dir1_threshold_10": 0.7950821273976216, "scr_metric_threshold_10": 0.297124752131342, "scr_dir2_threshold_10": 0.297124752131342,
      "scr_dir1_threshold_20": 0.8442623191096432, "scr_metric_threshold_20": 0.39297129893745686, "scr_dir2_threshold_20": 0.39297129893745686,
      "scr_dir1_threshold_50": 0.8524589363011892, "scr_metric_threshold_50": 0.5718850053196666, "scr_dir2_threshold_50": 0.5718850053196666,
      "scr_dir1_threshold_100": 0.885245893630119, "scr_metric_threshold_100": 0.5047924606414183, "scr_dir2_threshold_100": 0.5047924606414183,
      "scr_dir1_threshold_500": -0.4672130426710702, "scr_metric_threshold_500": -0.4728434212758859, "scr_dir2_threshold_500": -0.4728434212758859
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5977015904161226, "scr_metric_threshold_2": 0.11111107238164818, "scr_dir2_threshold_2": 0.11111107238164818,
      "scr_dir1_threshold_5": 0.6091956936677952, "scr_metric_threshold_5": 0.15789479187871047, "scr_dir2_threshold_5": 0.15789479187871047,
      "scr_dir1_threshold_10": 0.49425329092954456, "scr_metric_threshold_10": 0.16959072175297604, "scr_dir2_threshold_10": 0.16959072175297604,
      "scr_dir1_threshold_20": 0.5172414974328899, "scr_metric_threshold_20": 0.2543859954881725, "scr_dir2_threshold_20": 0.2543859954881725,
      "scr_dir1_threshold_50": 0.2758625887047159, "scr_metric_threshold_50": 0.37719299774408627, "scr_dir2_threshold_50": 0.37719299774408627,
      "scr_dir1_threshold_100": 0.206896598973156, "scr_metric_threshold_100": 0.43567247283283095, "scr_dir2_threshold_100": 0.43567247283283095,
      "scr_dir1_threshold_500": 0.011494788362434555, "scr_metric_threshold_500": -0.049707658394982895, "scr_dir2_threshold_500": -0.049707658394982895
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.40000014901164305, "scr_metric_threshold_2": 0.1143911991001679, "scr_dir2_threshold_2": 0.1143911991001679,
      "scr_dir1_threshold_5": 0.4312501257285738, "scr_metric_threshold_5": 0.22140220022495802, "scr_dir2_threshold_5": 0.22140220022495802,
      "scr_dir1_threshold_10": 0.20000007450582152, "scr_metric_threshold_10": 0.14022134215569965, "scr_dir2_threshold_10": 0.14022134215569965,
      "scr_dir1_threshold_20": 0.4625001024455046, "scr_metric_threshold_20": 0.2656827282472761, "scr_dir2_threshold_20": 0.2656827282472761,
      "scr_dir1_threshold_50": 0.11874998603015846, "scr_metric_threshold_50": 0.13653135313966885, "scr_dir2_threshold_50": 0.13653135313966885,
      "scr_dir1_threshold_100": -0.2500001862645538, "scr_metric_threshold_100": -0.10701100112479012, "scr_dir2_threshold_100": -0.10701100112479012,
      "scr_dir1_threshold_500": 0.05625003259629691, "scr_metric_threshold_500": -0.509225082511735, "scr_dir2_threshold_500": -0.509225082511735
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.0939596912720427, "scr_metric_threshold_2": 0.40758300934493374, "scr_dir2_threshold_2": 0.40758300934493374,
      "scr_dir1_threshold_5": 0.1946311033736218, "scr_metric_threshold_5": 0.4834122553349682, "scr_dir2_threshold_5": 0.4834122553349682,
      "scr_dir1_threshold_10": -0.1879193825440854, "scr_metric_threshold_10": 0.4265403914640509, "scr_dir2_threshold_10": 0.4265403914640509,
      "scr_dir1_threshold_20": 0.026845683225018263, "scr_metric_threshold_20": 0.469194430610456, "scr_dir2_threshold_20": 0.469194430610456,
      "scr_dir1_threshold_50": 0.08053704967505479, "scr_metric_threshold_50": 0.5213270195732026, "scr_dir2_threshold_50": 0.5213270195732026,
      "scr_dir1_threshold_100": -0.4832210979572015, "scr_metric_threshold_100": 0.6824646263424815, "scr_dir2_threshold_100": 0.6824646263424815,
      "scr_dir1_threshold_500": -0.4026844483131891, "scr_metric_threshold_500": 0.6824646263424815, "scr_dir2_threshold_500": 0.6824646263424815
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.17391294958311307, "scr_metric_threshold_2": 0.09433952715525604, "scr_dir2_threshold_2": 0.09433952715525604,
      "scr_dir1_threshold_5": 0.34782589916622614, "scr_metric_threshold_5": 0.15094341214079066, "scr_dir2_threshold_5": 0.15094341214079066,
      "scr_dir1_threshold_10": 0.5217388487493392, "scr_metric_threshold_10": 0.25471677954998495, "scr_dir2_threshold_10": 0.25471677954998495,
      "scr_dir1_threshold_20": 0.5724636929998238, "scr_metric_threshold_20": 0.37264146964802336, "scr_dir2_threshold_20": 0.37264146964802336,
      "scr_dir1_threshold_50": 0.5797099759162702, "scr_metric_threshold_50": 0.47169791693024854, "scr_dir2_threshold_50": 0.47169791693024854,
      "scr_dir1_threshold_100": 0.6304348201667548, "scr_metric_threshold_100": 0.3915094313098682, "scr_dir2_threshold_100": 0.3915094313098682,
      "scr_dir1_threshold_500": 0.5869562588327165, "scr_metric_threshold_500": 0.4952827987190625, "scr_dir2_threshold_500": 0.4952827987190625
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.11811009209619154, "scr_metric_threshold_2": 0.06425715501060796, "scr_dir2_threshold_2": 0.06425715501060796,
      "scr_dir1_threshold_5": 0.34645683334217237, "scr_metric_threshold_5": 0.16465866049459404, "scr_dir2_threshold_5": 0.16465866049459404,
      "scr_dir1_threshold_10": 0.5354330745616361, "scr_metric_threshold_10": 0.1566266358063036, "scr_dir2_threshold_10": 0.1566266358063036,
      "scr_dir1_threshold_20": 0.5039369561370944, "scr_metric_threshold_20": 0.3293173209891881, "scr_dir2_threshold_20": 0.3293173209891881,
      "scr_dir1_threshold_50": 0.5118108684112832, "scr_metric_threshold_50": 0.46987942866676863, "scr_dir2_threshold_50": 0.46987942866676863,
      "scr_dir1_threshold_100": 0.5984253114107195, "scr_metric_threshold_100": 0.43373483881731945, "scr_dir2_threshold_100": 0.43373483881731945,
      "scr_dir1_threshold_500": 0.5354330745616361, "scr_metric_threshold_500": 0.38554221193543453, "scr_dir2_threshold_500": 0.38554221193543453
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.006060527244557103, "scr_metric_threshold_2": 0.06666679912142758, "scr_dir2_threshold_2": 0.06666679912142758,
      "scr_dir1_threshold_5": 0.11515146260748252, "scr_metric_threshold_5": 0.17777779985357126, "scr_dir2_threshold_5": 0.17777779985357126,
      "scr_dir1_threshold_10": 0.21818187072585082, "scr_metric_threshold_10": 0.2833333167764882, "scr_dir2_threshold_10": 0.2833333167764882,
      "scr_dir1_threshold_20": 0.3030303358703234, "scr_metric_threshold_20": 0.08333325054910777, "scr_dir2_threshold_20": 0.08333325054910777,
      "scr_dir1_threshold_50": 0.32121191760399476, "scr_metric_threshold_50": -0.08333325054910777, "scr_dir2_threshold_50": -0.08333325054910777,
      "scr_dir1_threshold_100": 0.21212098224106932, "scr_metric_threshold_100": -0.1111110007321437, "scr_dir2_threshold_100": -0.1111110007321437,
      "scr_dir1_threshold_500": -0.6000001444960897, "scr_metric_threshold_500": -0.2500000827842256, "scr_dir2_threshold_500": -0.2500000827842256
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_32",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_33_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 125,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": false,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [
        ["professor", "nurse"],
        ["architect", "journalist"],
        ["surgeon", "psychologist"],
        ["attorney", "teacher"]
      ],
      "canrager/amazon_reviews_mcauley_1and5": [
        ["Books", "CDs_and_Vinyl"],
        ["Software", "Electronics"],
        ["Pet_Supplies", "Office_Products"],
        ["Industrial_and_Scientific", "Toys_and_Games"]
      ]
    }
  },
  "eval_id": "d58fae9f-2960-42da-bfc7-cc7b189fd747",
  "datetime_epoch_millis": 1736484661295,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.41981032588980666, "scr_metric_threshold_2": 0.1234894823826268, "scr_dir2_threshold_2": 0.1234894823826268,
      "scr_dir1_threshold_5": 0.43534356048113265, "scr_metric_threshold_5": 0.19466786284925236, "scr_dir2_threshold_5": 0.19466786284925236,
      "scr_dir1_threshold_10": 0.40442771867890437, "scr_metric_threshold_10": 0.2786426210678044, "scr_dir2_threshold_10": 0.2786426210678044,
      "scr_dir1_threshold_20": 0.43752789697350664, "scr_metric_threshold_20": 0.3721602839079613, "scr_dir2_threshold_20": 0.3721602839079613,
      "scr_dir1_threshold_50": 0.18064775986922108, "scr_metric_threshold_50": 0.4603767668408881, "scr_dir2_threshold_50": 0.4603767668408881,
      "scr_dir1_threshold_100": 0.061290157505954276, "scr_metric_threshold_100": 0.38863945018417695, "scr_dir2_threshold_100": 0.38863945018417695,
      "scr_dir1_threshold_500": -1.3073664811810524, "scr_metric_threshold_500": 0.0330351645248599, "scr_dir2_threshold_500": 0.0330351645248599
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5999997195076798, "scr_metric_threshold_2": 0.033802715688210305, "scr_dir2_threshold_2": 0.033802715688210305,
      "scr_dir1_threshold_5": 0.6117649698751249, "scr_metric_threshold_5": 0.05915483640456743, "scr_dir2_threshold_5": 0.05915483640456743,
      "scr_dir1_threshold_10": 0.5529415229611013, "scr_metric_threshold_10": 0.1464788264165085, "scr_dir2_threshold_10": 0.1464788264165085,
      "scr_dir1_threshold_20": 0.635294068148414, "scr_metric_threshold_20": 0.12676043568138684, "scr_dir2_threshold_20": 0.12676043568138684,
      "scr_dir1_threshold_50": 0.635294068148414, "scr_metric_threshold_50": 0.17464781212348335, "scr_dir2_threshold_50": 0.17464781212348335,
      "scr_dir1_threshold_100": 0.7058820641990823, "scr_metric_threshold_100": 0.17183094713286562, "scr_dir2_threshold_100": 0.17183094713286562,
      "scr_dir1_threshold_500": 0.08235324641811308, "scr_metric_threshold_500": 0.2591549371448067, "scr_dir2_threshold_500": 0.2591549371448067
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.8196724675350056, "scr_metric_threshold_2": 0.1501598946845039, "scr_dir2_threshold_2": 0.1501598946845039,
      "scr_dir1_threshold_5": 0.8442623191096432, "scr_metric_threshold_5": 0.23322686383043795, "scr_dir2_threshold_5": 0.23322686383043795,
      "scr_dir1_threshold_10": 0.6639342980819026, "scr_metric_threshold_10": 0.30990413936136196, "scr_dir2_threshold_10": 0.30990413936136196,
      "scr_dir1_threshold_20": 0.762295170068692, "scr_metric_threshold_20": 0.44089457234051427, "scr_dir2_threshold_20": 0.44089457234051427,
      "scr_dir1_threshold_50": 0.8688526592470271, "scr_metric_threshold_50": 0.42172530106532347, "scr_dir2_threshold_50": 0.42172530106532347,
      "scr_dir1_threshold_100": 0.8524589363011892, "scr_metric_threshold_100": 0.09584673723627565, "scr_dir2_threshold_100": 0.09584673723627565,
      "scr_dir1_threshold_500": -2.1639347866446488, "scr_metric_threshold_500": 0.306709292553857, "scr_dir2_threshold_500": 0.306709292553857
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.620689796919468, "scr_metric_threshold_2": 0.12573094115383435, "scr_dir2_threshold_2": 0.12573094115383435,
      "scr_dir1_threshold_5": 0.6091956936677952, "scr_metric_threshold_5": 0.17251466065089666, "scr_dir2_threshold_5": 0.17251466065089666,
      "scr_dir1_threshold_10": 0.5747126988020154, "scr_metric_threshold_10": 0.21929820586537577, "scr_dir2_threshold_10": 0.21929820586537577,
      "scr_dir1_threshold_20": 0.5517244922986699, "scr_metric_threshold_20": 0.27192980315827925, "scr_dir2_threshold_20": 0.27192980315827925,
      "scr_dir1_threshold_50": -2.2528730119798466, "scr_metric_threshold_50": 0.3567250768934757, "scr_dir2_threshold_50": 0.3567250768934757,
      "scr_dir1_threshold_100": -1.6091950085570335, "scr_metric_threshold_100": 0.4473684027070965, "scr_dir2_threshold_100": 0.4473684027070965,
      "scr_dir1_threshold_500": -3.5862061169429262, "scr_metric_threshold_500": -0.2953216629068104, "scr_dir2_threshold_500": -0.2953216629068104
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.44999988824126774, "scr_metric_threshold_2": 0.08118085806925839, "scr_dir2_threshold_2": 0.08118085806925839,
      "scr_dir1_threshold_5": 0.38125001396984154, "scr_metric_threshold_5": 0.16605170515454756, "scr_dir2_threshold_5": 0.16605170515454756,
      "scr_dir1_threshold_10": 0.2312500512227523, "scr_metric_threshold_10": 0.3062730473102472, "scr_dir2_threshold_10": 0.3062730473102472,
      "scr_dir1_threshold_20": -0.21250028871005835, "scr_metric_threshold_20": 0.402214081386945, "scr_dir2_threshold_20": 0.402214081386945,
      "scr_dir1_threshold_50": 0.01874976251269393, "scr_metric_threshold_50": 0.4206642464104151, "scr_dir2_threshold_50": 0.4206642464104151,
      "scr_dir1_threshold_100": -0.41250036321587985, "scr_metric_threshold_100": 0.1918818482100793, "scr_dir2_threshold_100": 0.1918818482100793,
      "scr_dir1_threshold_500": -1.6937505261973644, "scr_metric_threshold_500": -0.7564574257922249, "scr_dir2_threshold_500": -0.7564574257922249
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.21476506576910367, "scr_metric_threshold_2": 0.265402784694772, "scr_dir2_threshold_2": 0.265402784694772,
      "scr_dir1_threshold_5": 0.20805374497060972, "scr_metric_threshold_5": 0.3033175489330064, "scr_dir2_threshold_5": 0.3033175489330064,
      "scr_dir1_threshold_10": 0.1543623785205732, "scr_metric_threshold_10": 0.43601894128039237, "scr_dir2_threshold_10": 0.43601894128039237,
      "scr_dir1_threshold_20": 0.2483220697926159, "scr_metric_threshold_20": 0.4834122553349682, "scr_dir2_threshold_20": 0.4834122553349682,
      "scr_dir1_threshold_50": 0.46979845636021356, "scr_metric_threshold_50": 0.6729857940397057, "scr_dir2_threshold_50": 0.6729857940397057,
      "scr_dir1_threshold_100": 0.45637581476322564, "scr_metric_threshold_100": 0.8388626757171553, "scr_dir2_threshold_100": 0.8388626757171553,
      "scr_dir1_threshold_500": -0.7449662093778477, "scr_metric_threshold_500": 0.7582938723325159, "scr_dir2_threshold_500": 0.7582938723325159
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.28260848908284875, "scr_metric_threshold_2": 0.13679237060591493, "scr_dir2_threshold_2": 0.13679237060591493,
      "scr_dir1_threshold_5": 0.36231889691679886, "scr_metric_threshold_5": 0.2877357827467056, "scr_dir2_threshold_5": 0.2877357827467056,
      "scr_dir1_threshold_10": 0.4275363070001762, "scr_metric_threshold_10": 0.35849042811314763, "scr_dir2_threshold_10": 0.35849042811314763,
      "scr_dir1_threshold_20": 0.5869562588327165, "scr_metric_threshold_20": 0.5094338402539382, "scr_dir2_threshold_20": 0.5094338402539382,
      "scr_dir1_threshold_50": 0.5797099759162702, "scr_metric_threshold_50": 0.6981131757184187, "scr_dir2_threshold_50": 0.6981131757184187,
      "scr_dir1_threshold_100": -0.04347812941635828, "scr_metric_threshold_100": 0.32075450478945783, "scr_dir2_threshold_100": 0.32075450478945783,
      "scr_dir1_threshold_500": 0.10144925658328928, "scr_metric_threshold_500": 0.33018862619736444, "scr_dir2_threshold_500": 0.33018862619736444
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.26771677194471133, "scr_metric_threshold_2": 0.10040174486005715, "scr_dir2_threshold_2": 0.10040174486005715,
      "scr_dir1_threshold_5": 0.259842390342736, "scr_metric_threshold_5": 0.16867467283873927, "scr_dir2_threshold_5": 0.16867467283873927,
      "scr_dir1_threshold_10": 0.3700785701647388, "scr_metric_threshold_10": 0.2248995637849857, "scr_dir2_threshold_10": 0.2248995637849857,
      "scr_dir1_threshold_20": 0.5826770175345555, "scr_metric_threshold_20": 0.3815261995912893, "scr_dir2_threshold_20": 0.3815261995912893,
      "scr_dir1_threshold_50": 0.6771653728081806, "scr_metric_threshold_50": 0.5381525960215219, "scr_dir2_threshold_50": 0.5381525960215219,
      "scr_dir1_threshold_100": 0.7165354035069111, "scr_metric_threshold_100": 0.6425703532257242, "scr_dir2_threshold_100": 0.6425703532257242,
      "scr_dir1_threshold_500": -1.1023617982200273, "scr_metric_threshold_500": 0.31726904458068134, "scr_dir2_threshold_500": 0.31726904458068134
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.1030304081183683, "scr_metric_threshold_2": 0.0944445493044635, "scr_dir2_threshold_2": 0.0944445493044635,
      "scr_dir1_threshold_5": 0.20606045499651224, "scr_metric_threshold_5": 0.1666668322351178, "scr_dir2_threshold_5": 0.1666668322351178,
      "scr_dir1_threshold_10": 0.26060592267797494, "scr_metric_threshold_10": 0.22777781641041636, "scr_dir2_threshold_10": 0.22777781641041636,
      "scr_dir1_threshold_20": 0.34545438782244753, "scr_metric_threshold_20": 0.3611110835163693, "scr_dir2_threshold_20": 0.3611110835163693,
      "scr_dir1_threshold_50": 0.44848479594081586, "scr_metric_threshold_50": 0.4000001324547609, "scr_dir2_threshold_50": 0.4000001324547609,
      "scr_dir1_threshold_100": -0.17575745753350233, "scr_metric_threshold_100": 0.4000001324547609, "scr_dir2_threshold_100": 0.4000001324547609,
      "scr_dir1_threshold_500": -1.3515149150670047, "scr_metric_threshold_500": -0.655555367911311, "scr_dir2_threshold_500": -0.655555367911311
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_33",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "jumprelu",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_34_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 125,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": false,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [
        ["professor", "nurse"],
        ["architect", "journalist"],
        ["surgeon", "psychologist"],
        ["attorney", "teacher"]
      ],
      "canrager/amazon_reviews_mcauley_1and5": [
        ["Books", "CDs_and_Vinyl"],
        ["Software", "Electronics"],
        ["Pet_Supplies", "Office_Products"],
        ["Industrial_and_Scientific", "Toys_and_Games"]
      ]
    }
  },
  "eval_id": "670ca67c-1365-4c56-8ec8-636742a581a7",
  "datetime_epoch_millis": 1736484802212,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.40775944381189844, "scr_metric_threshold_2": 0.07300099949345072, "scr_dir2_threshold_2": 0.07300099949345072,
      "scr_dir1_threshold_5": 0.32444944331423853, "scr_metric_threshold_5": 0.1441600726966133, "scr_dir2_threshold_5": 0.1441600726966133,
      "scr_dir1_threshold_10": 0.4283040451128787, "scr_metric_threshold_10": 0.28425922021406314, "scr_dir2_threshold_10": 0.28425922021406314,
      "scr_dir1_threshold_20": 0.3950719375932281, "scr_metric_threshold_20": 0.36282972194943414, "scr_dir2_threshold_20": 0.36282972194943414,
      "scr_dir1_threshold_50": 0.23774254658762659, "scr_metric_threshold_50": 0.44978790197553425, "scr_dir2_threshold_50": 0.44978790197553425,
      "scr_dir1_threshold_100": 0.006344536646292712, "scr_metric_threshold_100": 0.35760931529173534, "scr_dir2_threshold_100": 0.35760931529173534,
      "scr_dir1_threshold_500": -1.3668218131093501, "scr_metric_threshold_500": -0.194814897857172, "scr_dir2_threshold_500": -0.194814897857172
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.6470586172850588, "scr_metric_threshold_2": 0.0112676278628697, "scr_dir2_threshold_2": 0.0112676278628697,
      "scr_dir1_threshold_5": 0.1529412424687812, "scr_metric_threshold_5": 0.09295771999317652, "scr_dir2_threshold_5": 0.09295771999317652,
      "scr_dir1_threshold_10": 0.6470586172850588, "scr_metric_threshold_10": 0.10704221284666396, "scr_dir2_threshold_10": 0.10704221284666396,
      "scr_dir1_threshold_20": 0.6235295190117695, "scr_metric_threshold_20": 0.10704221284666396, "scr_dir2_threshold_20": 0.10704221284666396,
      "scr_dir1_threshold_50": 0.5764706212343906, "scr_metric_threshold_50": -0.008450762872251974, "scr_dir2_threshold_50": -0.008450762872251974,
      "scr_dir1_threshold_100": -1.5294110222262114, "scr_metric_threshold_100": -0.02816898570697485, "scr_dir2_threshold_100": -0.02816898570697485,
      "scr_dir1_threshold_500": -4.070586593589067, "scr_metric_threshold_500": -0.1380282314446553, "scr_dir2_threshold_500": -0.1380282314446553
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.8360657019180974, "scr_metric_threshold_2": 0.1789138968123705, "scr_dir2_threshold_2": 0.1789138968123705,
      "scr_dir1_threshold_5": 0.8442623191096432, "scr_metric_threshold_5": 0.23961674787560874, "scr_dir2_threshold_5": 0.23961674787560874,
      "scr_dir1_threshold_10": 0.5409835745204756, "scr_metric_threshold_10": 0.370607180854761, "scr_dir2_threshold_10": 0.370607180854761,
      "scr_dir1_threshold_20": 0.6557376808903568, "scr_metric_threshold_20": 0.3993609925524668, "scr_dir2_threshold_20": 0.3993609925524668,
      "scr_dir1_threshold_50": 0.639344446507265, "scr_metric_threshold_50": 0.5271565787241141, "scr_dir2_threshold_50": 0.5271565787241141,
      "scr_dir1_threshold_100": 0.8032787445891676, "scr_metric_threshold_100": 0.41533560745031345, "scr_dir2_threshold_100": 0.41533560745031345,
      "scr_dir1_threshold_500": -1.6803285095904867, "scr_metric_threshold_500": -0.5303514255316191, "scr_dir2_threshold_500": -0.5303514255316191
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5517244922986699, "scr_metric_threshold_2": 0.08187133483727584, "scr_dir2_threshold_2": 0.08187133483727584,
      "scr_dir1_threshold_5": 0.5402303890469973, "scr_metric_threshold_5": 0.06140341398666529, "scr_dir2_threshold_5": 0.06140341398666529,
      "scr_dir1_threshold_10": 0.5057473941812173, "scr_metric_threshold_10": 0.14035080992602053, "scr_dir2_threshold_10": 0.14035080992602053,
      "scr_dir1_threshold_20": 0.5977015904161226, "scr_metric_threshold_20": 0.201754398195269, "scr_dir2_threshold_20": 0.201754398195269,
      "scr_dir1_threshold_50": -0.37931020308053204, "scr_metric_threshold_50": 0.31286547057691716, "scr_dir2_threshold_50": 0.31286547057691716,
      "scr_dir1_threshold_100": -0.32183900171140667, "scr_metric_threshold_100": 0.41520472626480354, "scr_dir2_threshold_100": 0.41520472626480354,
      "scr_dir1_threshold_500": -0.5632179104395808, "scr_metric_threshold_500": -0.36842100676774125, "scr_dir2_threshold_500": -0.36842100676774125
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.5, "scr_metric_threshold_2": 0.14022134215569965, "scr_dir2_threshold_2": 0.14022134215569965,
      "scr_dir1_threshold_5": -0.0937503026798999, "scr_metric_threshold_5": 0.18081188116198693, "scr_dir2_threshold_5": 0.18081188116198693,
      "scr_dir1_threshold_10": 0.1437500419095246, "scr_metric_threshold_10": 0.4649447744327332, "scr_dir2_threshold_10": 0.4649447744327332,
      "scr_dir1_threshold_20": -0.08125008847566305, "scr_metric_threshold_20": 0.5239852585191744, "scr_dir2_threshold_20": 0.5239852585191744,
      "scr_dir1_threshold_50": -0.5687502468005338, "scr_metric_threshold_50": 0.5830257426056157, "scr_dir2_threshold_50": 0.5830257426056157,
      "scr_dir1_threshold_100": -0.6312502002343953, "scr_metric_threshold_100": 0.402214081386945, "scr_dir2_threshold_100": 0.402214081386945,
      "scr_dir1_threshold_500": -1.3812503864989492, "scr_metric_threshold_500": -0.21402200224958023, "scr_dir2_threshold_500": -0.21402200224958023
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.1543623785205732, "scr_metric_threshold_2": -0.11374401022826881, "scr_dir2_threshold_2": -0.11374401022826881,
      "scr_dir1_threshold_5": 0.28187907381612815, "scr_metric_threshold_5": -0.023696657027287907, "scr_dir2_threshold_5": -0.023696657027287907,
      "scr_dir1_threshold_10": 0.29530211544415846, "scr_metric_threshold_10": 0.2417061276674841, "scr_dir2_threshold_10": 0.2417061276674841,
      "scr_dir1_threshold_20": 0.08724837047354875, "scr_metric_threshold_20": 0.47393370551862674, "scr_dir2_threshold_20": 0.47393370551862674,
      "scr_dir1_threshold_50": 0.46979845636021356, "scr_metric_threshold_50": 0.7393364902133988, "scr_dir2_threshold_50": 0.7393364902133988,
      "scr_dir1_threshold_100": 0.52348982281025, "scr_metric_threshold_100": 0.8436019506253261, "scr_dir2_threshold_100": 0.8436019506253261,
      "scr_dir1_threshold_500": 0.2885907946456645, "scr_metric_threshold_500": 0.8293838434143797, "scr_dir2_threshold_500": 0.8293838434143797
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.2681159232499559, "scr_metric_threshold_2": 0.16037725239472891, "scr_dir2_threshold_2": 0.16037725239472891,
      "scr_dir1_threshold_5": 0.3550721820826725, "scr_metric_threshold_5": 0.3867925111828991, "scr_dir2_threshold_5": 0.3867925111828991,
      "scr_dir1_threshold_10": 0.5144925658328928, "scr_metric_threshold_10": 0.6037733674091943, "scr_dir2_threshold_10": 0.6037733674091943,
      "scr_dir1_threshold_20": 0.557971127166931, "scr_metric_threshold_20": 0.6462262108598532, "scr_dir2_threshold_20": 0.6462262108598532,
      "scr_dir1_threshold_50": 0.20289851316657856, "scr_metric_threshold_50": 0.7688678210848607, "scr_dir2_threshold_50": 0.7688678210848607,
      "scr_dir1_threshold_100": 0.47826071933298087, "scr_metric_threshold_100": 0.8254717060703953, "scr_dir2_threshold_100": 0.8254717060703953,
      "scr_dir1_threshold_500": -2.253623357417063, "scr_metric_threshold_500": -0.4198115143796197, "scr_dir2_threshold_500": -0.4198115143796197
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.16535450439689736, "scr_metric_threshold_2": 0.10843376954834762, "scr_dir2_threshold_2": 0.10843376954834762,
      "scr_dir1_threshold_5": 0.3149607149176306, "scr_metric_threshold_5": 0.2931727311397389, "scr_dir2_threshold_5": 0.2931727311397389,
      "scr_dir1_threshold_10": 0.4881891315887168, "scr_metric_threshold_10": 0.30120475582802936, "scr_dir2_threshold_10": 0.30120475582802936,
      "scr_dir1_threshold_20": 0.5984253114107195, "scr_metric_threshold_20": 0.3614458984944921, "scr_dir2_threshold_20": 0.3614458984944921,
      "scr_dir1_threshold_50": 0.5433069868358249, "scr_metric_threshold_50": 0.542168608365667, "scr_dir2_threshold_50": 0.542168608365667,
      "scr_dir1_threshold_100": 0.42519689473963335, "scr_metric_threshold_100": 0.5261043196130151, "scr_dir2_threshold_100": 0.5261043196130151,
      "scr_dir1_threshold_500": -0.6377948727816636, "scr_metric_threshold_500": -0.31726904458068134, "scr_dir2_threshold_500": -0.31726904458068134
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.13939393282593532, "scr_metric_threshold_2": 0.01666678256458246, "scr_dir2_threshold_2": 0.01666678256458246,
      "scr_dir1_threshold_5": 0.19999992775195513, "scr_metric_threshold_5": -0.07777776673988103, "scr_dir2_threshold_5": -0.07777776673988103,
      "scr_dir1_threshold_10": 0.29090892014098485, "scr_metric_threshold_10": 0.04444453274761838, "scr_dir2_threshold_10": 0.04444453274761838,
      "scr_dir1_threshold_20": 0.12121198985203961, "scr_metric_threshold_20": 0.188889098608927, "scr_dir2_threshold_20": 0.188889098608927,
      "scr_dir1_threshold_50": 0.41818179847780595, "scr_metric_threshold_50": 0.1333332671059529, "scr_dir2_threshold_50": 0.1333332671059529,
      "scr_dir1_threshold_100": 0.3030303358703234, "scr_metric_threshold_100": -0.5388888833699406, "scr_dir2_threshold_100": -0.5388888833699406,
      "scr_dir1_threshold_500": -0.6363636692036567, "scr_metric_threshold_500": -0.3999998013178586, "scr_dir2_threshold_500": -0.3999998013178586
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_34",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "jumprelu",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000,
    "context_length": 128, "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20,
    "probe_lr": 0.001, "probe_l1_penalty": 0.001, "sae_batch_size": 125, "llm_batch_size": 256,
    "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "87b4c089-263d-4bdf-80f8-6923cf0c450f",
  "datetime_epoch_millis": 1736486048846,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.17292759970446409, "scr_metric_threshold_2": 0.0666525759061695, "scr_dir2_threshold_2": 0.0666525759061695,
      "scr_dir1_threshold_5": 0.1801288471088491, "scr_metric_threshold_5": 0.09678535978214271, "scr_dir2_threshold_5": 0.09678535978214271,
      "scr_dir1_threshold_10": 0.2320928876957574, "scr_metric_threshold_10": 0.13560633628848898, "scr_dir2_threshold_10": 0.13560633628848898,
      "scr_dir1_threshold_20": 0.2152403448230175, "scr_metric_threshold_20": 0.17149562734444185, "scr_dir2_threshold_20": 0.17149562734444185,
      "scr_dir1_threshold_50": 0.2497616732004487, "scr_metric_threshold_50": 0.21787723368844783, "scr_dir2_threshold_50": 0.21787723368844783,
      "scr_dir1_threshold_100": 0.1649864594733625, "scr_metric_threshold_100": 0.16728053396493264, "scr_dir2_threshold_100": 0.16728053396493264,
      "scr_dir1_threshold_500": -0.07504506091718673, "scr_metric_threshold_500": 0.08918369127841275, "scr_dir2_threshold_500": 0.08918369127841275
    }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.3647059318515859, "scr_metric_threshold_2": 0.016901357844105153, "scr_dir2_threshold_2": 0.016901357844105153,
      "scr_dir1_threshold_5": 0.45882372740634364, "scr_metric_threshold_5": 0.06478873428620167, "scr_dir2_threshold_5": 0.06478873428620167,
      "scr_dir1_threshold_10": 0.4823528256796329, "scr_metric_threshold_10": 0.10985907783728167, "scr_dir2_threshold_10": 0.10985907783728167,
      "scr_dir1_threshold_20": 0.4000002804923201, "scr_metric_threshold_20": 0.13239433356302108, "scr_dir2_threshold_20": 0.13239433356302108,
      "scr_dir1_threshold_50": 0.447059178269699, "scr_metric_threshold_50": 0.16056331926999592, "scr_dir2_threshold_50": 0.16056331926999592,
      "scr_dir1_threshold_100": -0.44705847703889867, "scr_metric_threshold_100": 0.005633729981235451, "scr_dir2_threshold_100": 0.005633729981235451,
      "scr_dir1_threshold_500": -0.7529409619764611, "scr_metric_threshold_500": 0.09577458498379425, "scr_dir2_threshold_500": 0.09577458498379425 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.4508198082879784, "scr_metric_threshold_2": 0.04472842659555243, "scr_dir2_threshold_2": 0.04472842659555243,
      "scr_dir1_threshold_5": 0.516393722945838, "scr_metric_threshold_5": 0.09265189042877064, "scr_dir2_threshold_5": 0.09265189042877064,
      "scr_dir1_threshold_10": 0.5819671490409514, "scr_metric_threshold_10": 0.1437700106393331, "scr_dir2_threshold_10": 0.1437700106393331,
      "scr_dir1_threshold_20": 0.5327869573289298, "scr_metric_threshold_20": 0.19808316808756132, "scr_dir2_threshold_20": 0.19808316808756132,
      "scr_dir1_threshold_50": 0.4344260853421405, "scr_metric_threshold_50": 0.303514445746352, "scr_dir2_threshold_50": 0.303514445746352,
      "scr_dir1_threshold_100": 0.40163961657595687, "scr_metric_threshold_100": 0.10543146808895146, "scr_dir2_threshold_100": 0.10543146808895146,
      "scr_dir1_threshold_500": -0.09016376623249724, "scr_metric_threshold_500": -0.2555909819131338, "scr_dir2_threshold_500": -0.2555909819131338 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.25287369709060864, "scr_metric_threshold_2": 0.03216367644229295, "scr_dir2_threshold_2": 0.03216367644229295,
      "scr_dir1_threshold_5": 0.2988507952080613, "scr_metric_threshold_5": 0.052631597292903495, "scr_dir2_threshold_5": 0.052631597292903495,
      "scr_dir1_threshold_10": 0.310344898459734, "scr_metric_threshold_10": 0.08187133483727584, "scr_dir2_threshold_10": 0.08187133483727584,
      "scr_dir1_threshold_20": 0.2758625887047159, "scr_metric_threshold_20": 0.11403501127956878, "scr_dir2_threshold_20": 0.11403501127956878,
      "scr_dir1_threshold_50": 0.310344898459734, "scr_metric_threshold_50": 0.21637426696745518, "scr_dir2_threshold_50": 0.21637426696745518,
      "scr_dir1_threshold_100": 0.2873566919563886, "scr_metric_threshold_100": 0.23684201353548254, "scr_dir2_threshold_100": 0.23684201353548254,
      "scr_dir1_threshold_500": -0.41379251283555013, "scr_metric_threshold_500": 0.10233925568788639, "scr_dir2_threshold_500": 0.10233925568788639 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.10624977182592162, "scr_metric_threshold_2": 0.04797051703834888, "scr_dir2_threshold_2": 0.04797051703834888,
      "scr_dir1_threshold_5": 0.24374989289788157, "scr_metric_threshold_5": 0.07380088003719681, "scr_dir2_threshold_5": 0.07380088003719681,
      "scr_dir1_threshold_10": 0.24374989289788157, "scr_metric_threshold_10": 0.1180811881161987, "scr_dir2_threshold_10": 0.1180811881161987,
      "scr_dir1_threshold_20": 0.2625000279396831, "scr_metric_threshold_20": 0.17343168318660915, "scr_dir2_threshold_20": 0.17343168318660915,
      "scr_dir1_threshold_50": 0.31875006053598, "scr_metric_threshold_50": 0.2730627062793377, "scr_dir2_threshold_50": 0.2730627062793377,
      "scr_dir1_threshold_100": 0.2999999254941785, "scr_metric_threshold_100": 0.21771221120892723, "scr_dir2_threshold_100": 0.21771221120892723,
      "scr_dir1_threshold_500": 0.03749989755449542, "scr_metric_threshold_500": 0.2767526952953685, "scr_dir2_threshold_500": 0.2767526952953685 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.026845683225018263, "scr_metric_threshold_2": 0.2511849599702598, "scr_dir2_threshold_2": 0.2511849599702598,
      "scr_dir1_threshold_5": -0.33557044026616467, "scr_metric_threshold_5": 0.29857827402483567, "scr_dir2_threshold_5": 0.29857827402483567,
      "scr_dir1_threshold_10": -0.20134202414107336, "scr_metric_threshold_10": 0.34123231317124075, "scr_dir2_threshold_10": 0.34123231317124075,
      "scr_dir1_threshold_20": -0.1879193825440854, "scr_metric_threshold_20": 0.38388635231764584, "scr_dir2_threshold_20": 0.38388635231764584,
      "scr_dir1_threshold_50": -0.12751669529555493, "scr_metric_threshold_50": 0.3744075200148701, "scr_dir2_threshold_50": 0.3744075200148701,
      "scr_dir1_threshold_100": -0.07382532884551841, "scr_metric_threshold_100": 0.3696682451066994, "scr_dir2_threshold_100": 0.3696682451066994,
      "scr_dir1_threshold_500": -0.013422641596987927, "scr_metric_threshold_500": 0.21327019573202546, "scr_dir2_threshold_500": 0.21327019573202546 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.13043482016675478, "scr_metric_threshold_2": 0.05660360383156628, "scr_dir2_threshold_2": 0.05660360383156628,
      "scr_dir1_threshold_5": 0.15217366891609393, "scr_metric_threshold_5": 0.08962260702828691, "scr_dir2_threshold_5": 0.08962260702828691,
      "scr_dir1_threshold_10": 0.22463779383359767, "scr_metric_threshold_10": 0.14150929073288404, "scr_dir2_threshold_10": 0.14150929073288404,
      "scr_dir1_threshold_20": 0.15217366891609393, "scr_metric_threshold_20": 0.1981131757184187, "scr_dir2_threshold_20": 0.1981131757184187,
      "scr_dir1_threshold_50": 0.14492738599964755, "scr_metric_threshold_50": 0.2452829392960467, "scr_dir2_threshold_50": 0.2452829392960467,
      "scr_dir1_threshold_100": 0.18840551541600584, "scr_metric_threshold_100": 0.22169805750723268, "scr_dir2_threshold_100": 0.22169805750723268,
      "scr_dir1_threshold_500": 0.18115923249955945, "scr_metric_threshold_500": 0.24999985942301584, "scr_dir2_threshold_500": 0.24999985942301584 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.03937003069873052, "scr_metric_threshold_2": 0.02811256516115874, "scr_dir2_threshold_2": 0.02811256516115874,
      "scr_dir1_threshold_5": 0.09448835527362513, "scr_metric_threshold_5": 0.052208878602101196, "scr_dir2_threshold_5": 0.052208878602101196,
      "scr_dir1_threshold_10": 0.1968506228214391, "scr_metric_threshold_10": 0.07630519204304365, "scr_dir2_threshold_10": 0.07630519204304365,
      "scr_dir1_threshold_20": 0.2440945657943584, "scr_metric_threshold_20": 0.11646579423663808, "scr_dir2_threshold_20": 0.11646579423663808,
      "scr_dir1_threshold_50": 0.33070853946600826, "scr_metric_threshold_50": 0.06425715501060796, "scr_dir2_threshold_50": 0.06425715501060796,
      "scr_dir1_threshold_100": 0.43307080701382217, "scr_metric_threshold_100": 0.09236948079569564, "scr_dir2_threshold_100": 0.09236948079569564,
      "scr_dir1_threshold_500": 0.35433074561636113, "scr_metric_threshold_500": 0.06425715501060796, "scr_dir2_threshold_500": 0.06425715501060796 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.012121054489114207, "scr_metric_threshold_2": 0.05555550036607185, "scr_dir2_threshold_2": 0.05555550036607185,
      "scr_dir1_threshold_5": 0.012121054489114207, "scr_metric_threshold_5": 0.05000001655684511, "scr_dir2_threshold_5": 0.05000001655684511,
      "scr_dir1_threshold_10": 0.018181942973895696, "scr_metric_threshold_10": 0.0722222829306543, "scr_dir2_threshold_10": 0.0722222829306543,
      "scr_dir1_threshold_20": 0.04242405195212411, "scr_metric_threshold_20": 0.05555550036607185, "scr_dir2_threshold_20": 0.05555550036607185,
      "scr_dir1_threshold_50": 0.13939393282593532, "scr_metric_threshold_50": 0.10555551692291695, "scr_dir2_threshold_50": 0.10555551692291695,
      "scr_dir1_threshold_100": 0.23030292521496504, "scr_metric_threshold_100": 0.08888906549523676, "scr_dir2_threshold_100": 0.08888906549523676,
      "scr_dir1_threshold_500": 0.09696951963358681, "scr_metric_threshold_500": -0.03333323399226266, "scr_dir2_threshold_500": -0.03333323399226266 }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_StandardTrainerAprilUpdate_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_11",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null,
    "architecture": "standard_april_update", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32",
    "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null],
    "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000,
    "context_length": 128, "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20,
    "probe_lr": 0.001, "probe_l1_penalty": 0.001, "sae_batch_size": 125, "llm_batch_size": 256,
    "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "df67adde-448a-41cc-8530-060d071bc985",
  "datetime_epoch_millis": 1736486188217,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.29277373281882163, "scr_metric_threshold_2": 0.09920525987297542, "scr_dir2_threshold_2": 0.09920525987297542,
      "scr_dir1_threshold_5": 0.33109758895582847, "scr_metric_threshold_5": 0.1467549847216187, "scr_dir2_threshold_5": 0.1467549847216187,
      "scr_dir1_threshold_10": 0.34521358931236057, "scr_metric_threshold_10": 0.20007078556587554, "scr_dir2_threshold_10": 0.20007078556587554,
      "scr_dir1_threshold_20": 0.388823950952442, "scr_metric_threshold_20": 0.25290518157444336, "scr_dir2_threshold_20": 0.25290518157444336,
      "scr_dir1_threshold_50": 0.4435317459160451, "scr_metric_threshold_50": 0.324288087189177, "scr_dir2_threshold_50": 0.324288087189177,
      "scr_dir1_threshold_100": 0.35072651769035357, "scr_metric_threshold_100": 0.3437259076212614, "scr_dir2_threshold_100": 0.3437259076212614,
      "scr_dir1_threshold_500": -0.03543931184243958, "scr_metric_threshold_500": 0.06266399384147664, "scr_dir2_threshold_500": 0.06266399384147664
    }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5294117234570117, "scr_metric_threshold_2": 0.0028168649906177255, "scr_dir2_threshold_2": 0.0028168649906177255,
      "scr_dir1_threshold_5": 0.5882351703710352, "scr_metric_threshold_5": 0.022535087825340604, "scr_dir2_threshold_5": 0.022535087825340604,
      "scr_dir1_threshold_10": 0.5999997195076798, "scr_metric_threshold_10": 0.0676055992768194, "scr_dir2_threshold_10": 0.0676055992768194,
      "scr_dir1_threshold_20": 0.5529415229611013, "scr_metric_threshold_20": 0.12394357069076911, "scr_dir2_threshold_20": 0.12394357069076911,
      "scr_dir1_threshold_50": 0.5764706212343906, "scr_metric_threshold_50": -0.02816898570697485, "scr_dir2_threshold_50": -0.02816898570697485,
      "scr_dir1_threshold_100": 0.2941179358009178, "scr_metric_threshold_100": -0.04225364646086107, "scr_dir2_threshold_100": -0.04225364646086107,
      "scr_dir1_threshold_500": 0.07058869728146844, "scr_metric_threshold_500": 0.08732382211154228, "scr_dir2_threshold_500": 0.08732382211154228 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.7131149783566704, "scr_metric_threshold_2": 0.07348242872341905, "scr_dir2_threshold_2": 0.07348242872341905,
      "scr_dir1_threshold_5": 0.7459019356856001, "scr_metric_threshold_5": 0.1437700106393331, "scr_dir2_threshold_5": 0.1437700106393331,
      "scr_dir1_threshold_10": 0.762295170068692, "scr_metric_threshold_10": 0.2012780148950663, "scr_dir2_threshold_10": 0.2012780148950663,
      "scr_dir1_threshold_20": 0.7459019356856001, "scr_metric_threshold_20": 0.29073486808617116, "scr_dir2_threshold_20": 0.29073486808617116,
      "scr_dir1_threshold_50": 0.8196724675350056, "scr_metric_threshold_50": 0.4568689968082001, "scr_dir2_threshold_50": 0.4568689968082001,
      "scr_dir1_threshold_100": 0.8278690847265514, "scr_metric_threshold_100": 0.5463258499993049, "scr_dir2_threshold_100": 0.5463258499993049,
      "scr_dir1_threshold_500": -0.17213140383619469, "scr_metric_threshold_500": -0.4376995351028485, "scr_dir2_threshold_500": -0.4376995351028485 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.4827591876778719, "scr_metric_threshold_2": 0.049707658394982895, "scr_dir2_threshold_2": 0.049707658394982895,
      "scr_dir1_threshold_5": 0.49425329092954456, "scr_metric_threshold_5": 0.07309934386093087, "scr_dir2_threshold_5": 0.07309934386093087,
      "scr_dir1_threshold_10": 0.4597702960637646, "scr_metric_threshold_10": 0.10526319458580699, "scr_dir2_threshold_10": 0.10526319458580699,
      "scr_dir1_threshold_20": 0.4482761928120919, "scr_metric_threshold_20": 0.14912280090236552, "scr_dir2_threshold_20": 0.14912280090236552,
      "scr_dir1_threshold_50": 0.3563219965771866, "scr_metric_threshold_50": 0.2485379434097481, "scr_dir2_threshold_50": 0.2485379434097481,
      "scr_dir1_threshold_100": 0.32183900171140667, "scr_metric_threshold_100": 0.3099415316789966, "scr_dir2_threshold_100": 0.3099415316789966,
      "scr_dir1_threshold_500": -1.2643671152315195, "scr_metric_threshold_500": -0.2485379434097481, "scr_dir2_threshold_500": -0.2485379434097481 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.3062498463317431, "scr_metric_threshold_2": 0.0774908690532276, "scr_dir2_threshold_2": 0.0774908690532276,
      "scr_dir1_threshold_5": 0.28124979045237697, "scr_metric_threshold_5": 0.1143911991001679, "scr_dir2_threshold_5": 0.1143911991001679,
      "scr_dir1_threshold_10": 0.21874983701851544, "scr_metric_threshold_10": 0.18081188116198693, "scr_dir2_threshold_10": 0.18081188116198693,
      "scr_dir1_threshold_20": 0.29375000465661383, "scr_metric_threshold_20": 0.26199273923124533, "scr_dir2_threshold_20": 0.26199273923124533,
      "scr_dir1_threshold_50": 0.41874991152433694, "scr_metric_threshold_50": 0.4612545654733862, "scr_dir2_threshold_50": 0.4612545654733862,
      "scr_dir1_threshold_100": 0.3062498463317431, "scr_metric_threshold_100": 0.4206642464104151, "scr_dir2_threshold_100": 0.4206642464104151,
      "scr_dir1_threshold_500": 0.16875009778889075, "scr_metric_threshold_500": -0.1180811881161987, "scr_dir2_threshold_500": -0.1180811881161987 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.053691366450036526, "scr_metric_threshold_2": 0.38388635231764584, "scr_dir2_threshold_2": 0.38388635231764584,
      "scr_dir1_threshold_5": 0.10067101207053668, "scr_metric_threshold_5": 0.4218011165558802, "scr_dir2_threshold_5": 0.4218011165558802,
      "scr_dir1_threshold_10": -0.16107369931906715, "scr_metric_threshold_10": 0.4597155983076803, "scr_dir2_threshold_10": 0.4597155983076803,
      "scr_dir1_threshold_20": 0.0, "scr_metric_threshold_20": 0.47867298042679746, "scr_dir2_threshold_20": 0.47867298042679746,
      "scr_dir1_threshold_50": 0.1543623785205732, "scr_metric_threshold_50": 0.5071091948486903, "scr_dir2_threshold_50": 0.5071091948486903,
      "scr_dir1_threshold_100": -0.4496640939336893, "scr_metric_threshold_100": 0.5971565480496713, "scr_dir2_threshold_100": 0.5971565480496713,
      "scr_dir1_threshold_500": -0.20134202414107336, "scr_metric_threshold_500": 0.402843734436763, "scr_dir2_threshold_500": 0.402843734436763 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.15942038375022027, "scr_metric_threshold_2": 0.08490568690131776, "scr_dir2_threshold_2": 0.08490568690131776,
      "scr_dir1_threshold_5": 0.23188407675004405, "scr_metric_threshold_5": 0.12735853035197664, "scr_dir2_threshold_5": 0.12735853035197664,
      "scr_dir1_threshold_10": 0.3985507434167107, "scr_metric_threshold_10": 0.21226393609932606, "scr_dir2_threshold_10": 0.21226393609932606,
      "scr_dir1_threshold_20": 0.4275363070001762, "scr_metric_threshold_20": 0.3349055463243336, "scr_dir2_threshold_20": 0.3349055463243336,
      "scr_dir1_threshold_50": 0.5144925658328928, "scr_metric_threshold_50": 0.3962263514368374, "scr_dir2_threshold_50": 0.3962263514368374,
      "scr_dir1_threshold_100": 0.5869562588327165, "scr_metric_threshold_100": 0.4150943130986822, "scr_dir2_threshold_100": 0.4150943130986822,
      "scr_dir1_threshold_500": 0.4492751557495154, "scr_metric_threshold_500": 0.5801884856203803, "scr_dir2_threshold_500": 0.5801884856203803 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.05511832457489462, "scr_metric_threshold_2": 0.06024090329039166, "scr_dir2_threshold_2": 0.06024090329039166,
      "scr_dir1_threshold_5": 0.13385838597235566, "scr_metric_threshold_5": 0.10441775720420239, "scr_dir2_threshold_5": 0.10441775720420239,
      "scr_dir1_threshold_10": 0.36220465789054995, "scr_metric_threshold_10": 0.18473896159139128, "scr_dir2_threshold_10": 0.18473896159139128,
      "scr_dir1_threshold_20": 0.39370077631509165, "scr_metric_threshold_20": 0.24497986488178294, "scr_dir2_threshold_20": 0.24497986488178294,
      "scr_dir1_threshold_50": 0.4960630438629056, "scr_metric_threshold_50": 0.34136559739769484, "scr_dir2_threshold_50": 0.34136559739769484,
      "scr_dir1_threshold_100": 0.5669291929861778, "scr_metric_threshold_100": 0.441767102881681, "scr_dir2_threshold_100": 0.441767102881681,
      "scr_dir1_threshold_500": 0.5748031052603666, "scr_metric_threshold_500": 0.4297188264731742, "scr_dir2_threshold_500": 0.4297188264731742 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.04242405195212411, "scr_metric_threshold_2": 0.06111131531220084, "scr_dir2_threshold_2": 0.06111131531220084,
      "scr_dir1_threshold_5": 0.07272704941513401, "scr_metric_threshold_5": 0.1666668322351178, "scr_dir2_threshold_5": 0.1666668322351178,
      "scr_dir1_threshold_10": 0.12121198985203961, "scr_metric_threshold_10": 0.188889098608927, "scr_dir2_threshold_10": 0.188889098608927,
      "scr_dir1_threshold_20": 0.24848486818886073, "scr_metric_threshold_20": 0.13888908205208186, "scr_dir2_threshold_20": 0.13888908205208186,
      "scr_dir1_threshold_50": 0.21212098224106932, "scr_metric_threshold_50": 0.2111110338458339, "scr_dir2_threshold_50": 0.2111110338458339,
      "scr_dir1_threshold_100": 0.35151491506700466, "scr_metric_threshold_100": 0.06111131531220084, "scr_dir2_threshold_100": 0.06111131531220084,
      "scr_dir1_threshold_500": 0.09090899238902972, "scr_metric_threshold_500": -0.19444425128125145, "scr_dir2_threshold_500": -0.19444425128125145 }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_StandardTrainerAprilUpdate_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_6",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null,
    "architecture": "standard_april_update", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32",
    "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null],
    "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_Standard_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000,
    "context_length": 128, "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20,
    "probe_lr": 0.001, "probe_l1_penalty": 0.001, "sae_batch_size": 125, "llm_batch_size": 256,
    "llm_dtype": "float32", "lower_vram_usage": false, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "dbf44c29-7bfe-4d1c-a1cc-50dfd359a2fa",
  "datetime_epoch_millis": 1736486466828,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.27791216236630273, "scr_metric_threshold_2": 0.10047036492466142, "scr_dir2_threshold_2": 0.10047036492466142,
      "scr_dir1_threshold_5": 0.3293466290569335, "scr_metric_threshold_5": 0.14729737548426816, "scr_dir2_threshold_5": 0.14729737548426816,
      "scr_dir1_threshold_10": 0.29873426684041654, "scr_metric_threshold_10": 0.19575010207524865, "scr_dir2_threshold_10": 0.19575010207524865,
      "scr_dir1_threshold_20": 0.2887665164299532, "scr_metric_threshold_20": 0.2516083317654872, "scr_dir2_threshold_20": 0.2516083317654872,
      "scr_dir1_threshold_50": 0.2993966938863816, "scr_metric_threshold_50": 0.31246039719309887, "scr_dir2_threshold_50": 0.31246039719309887,
      "scr_dir1_threshold_100": 0.27069250672700174, "scr_metric_threshold_100": 0.3122443973754755, "scr_dir2_threshold_100": 0.3122443973754755,
      "scr_dir1_threshold_500": 0.010412002104000333, "scr_metric_threshold_500": 0.1030536777008616, "scr_dir2_threshold_500": 0.1030536777008616
    }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5176471743203671, "scr_metric_threshold_2": 0.014084492853487425, "scr_dir2_threshold_2": 0.014084492853487425,
      "scr_dir1_threshold_5": 0.5999997195076798, "scr_metric_threshold_5": 0.030985850697592576, "scr_dir2_threshold_5": 0.030985850697592576,
      "scr_dir1_threshold_10": 0.5647060720977459, "scr_metric_threshold_10": 0.0563379714139497, "scr_dir2_threshold_10": 0.0563379714139497,
      "scr_dir1_threshold_20": 0.6941175150624376, "scr_metric_threshold_20": 0.12394357069076911, "scr_dir2_threshold_20": 0.12394357069076911,
      "scr_dir1_threshold_50": 0.6705884167891483, "scr_metric_threshold_50": -0.025352120716357125, "scr_dir2_threshold_50": -0.025352120716357125,
      "scr_dir1_threshold_100": 0.10588234469140236, "scr_metric_threshold_100": -0.008450762872251974, "scr_dir2_threshold_100": -0.008450762872251974,
      "scr_dir1_threshold_500": -0.1764703407420705, "scr_metric_threshold_500": 0.18309857499573534, "scr_dir2_threshold_500": 0.18309857499573534 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.6639342980819026, "scr_metric_threshold_2": 0.08945685319110486, "scr_dir2_threshold_2": 0.08945685319110486,
      "scr_dir1_threshold_5": 0.6721314038361946, "scr_metric_threshold_5": 0.1629392819145239, "scr_dir2_threshold_5": 0.1629392819145239,
      "scr_dir1_threshold_10": 0.6885246382192866, "scr_metric_threshold_10": 0.23642171063794296, "scr_dir2_threshold_10": 0.23642171063794296,
      "scr_dir1_threshold_20": 0.6885246382192866, "scr_metric_threshold_20": 0.3162940234065328, "scr_dir2_threshold_20": 0.3162940234065328,
      "scr_dir1_threshold_50": 0.7213115955482162, "scr_metric_threshold_50": 0.4728434212758859, "scr_dir2_threshold_50": 0.4728434212758859,
      "scr_dir1_threshold_100": 0.7131149783566704, "scr_metric_threshold_100": 0.5591054276594858, "scr_dir2_threshold_100": 0.5591054276594858,
      "scr_dir1_threshold_500": 0.08196714904095133, "scr_metric_threshold_500": -0.37699668403961023, "scr_dir2_threshold_500": -0.37699668403961023 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.4597702960637646, "scr_metric_threshold_2": 0.04385960631655852, "scr_dir2_threshold_2": 0.04385960631655852,
      "scr_dir1_threshold_5": 0.5172414974328899, "scr_metric_threshold_5": 0.06432752716716907, "scr_dir2_threshold_5": 0.06432752716716907,
      "scr_dir1_threshold_10": 0.43678208956041925, "scr_metric_threshold_10": 0.10818713348372759, "scr_dir2_threshold_10": 0.10818713348372759,
      "scr_dir1_threshold_20": 0.4252873011979847, "scr_metric_threshold_20": 0.15497067869820672, "scr_dir2_threshold_20": 0.15497067869820672,
      "scr_dir1_threshold_50": 0.1954024957214833, "scr_metric_threshold_50": 0.2807017941346242, "scr_dir2_threshold_50": 0.2807017941346242,
      "scr_dir1_threshold_100": 0.413793197946312, "scr_metric_threshold_100": 0.27485374205619983, "scr_dir2_threshold_100": 0.27485374205619983,
      "scr_dir1_threshold_500": -0.3333331049630794, "scr_metric_threshold_500": -0.08187133483727584, "scr_dir2_threshold_500": -0.08187133483727584 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.33749982304867393, "scr_metric_threshold_2": 0.08856083610131997, "scr_dir2_threshold_2": 0.08856083610131997,
      "scr_dir1_threshold_5": 0.38125001396984154, "scr_metric_threshold_5": 0.12915137510760727, "scr_dir2_threshold_5": 0.12915137510760727,
      "scr_dir1_threshold_10": 0.36249987892804003, "scr_metric_threshold_10": 0.1992620461854571, "scr_dir2_threshold_10": 0.1992620461854571,
      "scr_dir1_threshold_20": -0.025000055879366136, "scr_metric_threshold_20": 0.29520308026215486, "scr_dir2_threshold_20": 0.29520308026215486,
      "scr_dir1_threshold_50": 0.01874976251269393, "scr_metric_threshold_50": 0.4612545654733862, "scr_dir2_threshold_50": 0.4612545654733862,
      "scr_dir1_threshold_100": -0.05625003259629691, "scr_metric_threshold_100": 0.4095940594190065, "scr_dir2_threshold_100": 0.4095940594190065,
      "scr_dir1_threshold_500": 0.08750000931322768, "scr_metric_threshold_500": 0.04428052802231809, "scr_dir2_threshold_500": 0.04428052802231809 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.026845683225018263, "scr_metric_threshold_2": 0.3791470774094751, "scr_dir2_threshold_2": 0.3791470774094751,
      "scr_dir1_threshold_5": 0.11409405369856701, "scr_metric_threshold_5": 0.402843734436763, "scr_dir2_threshold_5": 0.402843734436763,
      "scr_dir1_threshold_10": -0.29530171541311606, "scr_metric_threshold_10": 0.4834122553349682, "scr_dir2_threshold_10": 0.4834122553349682,
      "scr_dir1_threshold_20": -0.1744963409160551, "scr_metric_threshold_20": 0.5165877446650319, "scr_dir2_threshold_20": 0.5165877446650319,
      "scr_dir1_threshold_50": -0.04697964562050015, "scr_metric_threshold_50": 0.5924169906550663, "scr_dir2_threshold_50": 0.5924169906550663,
      "scr_dir1_threshold_100": 0.0939596912720427, "scr_metric_threshold_100": 0.4928910876377439, "scr_dir2_threshold_100": 0.4928910876377439,
      "scr_dir1_threshold_500": -0.5570468268337623, "scr_metric_threshold_500": 0.3744075200148701, "scr_dir2_threshold_500": 0.3744075200148701 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.15217366891609393, "scr_metric_threshold_2": 0.08490568690131776, "scr_dir2_threshold_2": 0.08490568690131776,
      "scr_dir1_threshold_5": 0.21739107899947133, "scr_metric_threshold_5": 0.13679237060591493, "scr_dir2_threshold_5": 0.13679237060591493,
      "scr_dir1_threshold_10": 0.31159405266631424, "scr_metric_threshold_10": 0.20754701597235695, "scr_dir2_threshold_10": 0.20754701597235695,
      "scr_dir1_threshold_20": 0.13768110308320117, "scr_metric_threshold_20": 0.3066037444085504, "scr_dir2_threshold_20": 0.3066037444085504,
      "scr_dir1_threshold_50": 0.16666666666666666, "scr_metric_threshold_50": 0.3820753099019616, "scr_dir2_threshold_50": 0.3820753099019616,
      "scr_dir1_threshold_100": 0.11594182241618206, "scr_metric_threshold_100": 0.46226407667631025, "scr_dir2_threshold_100": 0.46226407667631025,
      "scr_dir1_threshold_500": 0.23188407675004405, "scr_metric_threshold_500": 0.5377356421697214, "scr_dir2_threshold_500": 0.5377356421697214 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.04724441230070582, "scr_metric_threshold_2": 0.04819286625795596, "scr_dir2_threshold_2": 0.04819286625795596,
      "scr_dir1_threshold_5": 0.10236226754781394, "scr_metric_threshold_5": 0.11244978189249286, "scr_dir2_threshold_5": 0.11244978189249286,
      "scr_dir1_threshold_10": 0.2362206535201696, "scr_metric_threshold_10": 0.1526103840860873, "scr_dir2_threshold_10": 0.1526103840860873,
      "scr_dir1_threshold_20": 0.3700785701647388, "scr_metric_threshold_20": 0.20481926268818848, "scr_dir2_threshold_20": 0.20481926268818848,
      "scr_dir1_threshold_50": 0.43307080701382217, "scr_metric_threshold_50": 0.2690764176987964, "scr_dir2_threshold_50": 0.2690764176987964,
      "scr_dir1_threshold_100": 0.4881891315887168, "scr_metric_threshold_100": 0.3132530322365361, "scr_dir2_threshold_100": 0.3132530322365361,
      "scr_dir1_threshold_500": 0.4881891315887168, "scr_metric_threshold_500": 0.27710844238708693, "scr_dir2_threshold_500": 0.27710844238708693 },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.018181942973895696, "scr_metric_threshold_2": 0.05555550036607185, "scr_dir2_threshold_2": 0.05555550036607185,
      "scr_dir1_threshold_5": 0.030302997463009903, "scr_metric_threshold_5": 0.13888908205208186, "scr_dir2_threshold_5": 0.13888908205208186,
      "scr_dir1_threshold_10": 0.08484846514447261, "scr_metric_threshold_10": 0.12222229948749942, "scr_dir2_threshold_10": 0.12222229948749942,
      "scr_dir1_threshold_20": 0.19393940050739802, "scr_metric_threshold_20": 0.0944445493044635, "scr_dir2_threshold_20": 0.0944445493044635,
      "scr_dir1_threshold_50": 0.23636345245952212, "scr_metric_threshold_50": 0.06666679912142758, "scr_dir2_threshold_50": 0.06666679912142758,
      "scr_dir1_threshold_100": 0.29090892014098485, "scr_metric_threshold_100": -0.0055554838092267324, "scr_dir2_threshold_100": -0.0055554838092267324,
      "scr_dir1_threshold_500": 0.26060592267797494, "scr_metric_threshold_500": -0.1333332671059529, "scr_dir2_threshold_500": -0.1333332671059529 }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_StandardTrainerAprilUpdate_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_8",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null,
    "architecture": "standard_april_update", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null,
    "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32",
    "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null],
    "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true, "early_stopping_patience": 20, "train_set_size": 4000, "test_set_size": 1000,
    "context_length": 128, "probe_train_batch_size": 16, "probe_test_batch_size": 500, "probe_epochs": 20,
    "probe_lr": 0.001, "probe_l1_penalty": 0.001, "sae_batch_size": 16, "llm_batch_size": 256,
    "llm_dtype": "float32", "lower_vram_usage": true, "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "9a25f3da-55b9-4e7b-8d8f-32addffd6338",
  "datetime_epoch_millis": 1737045305652,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.0424198751971939, "scr_metric_threshold_2": 0.11146310871089796, "scr_dir2_threshold_2": 0.11146310871089796,
      "scr_dir1_threshold_5": 0.12957450552511804, "scr_metric_threshold_5": 0.14824918055941527, "scr_dir2_threshold_5": 0.14824918055941527,
      "scr_dir1_threshold_10": 0.18407340705528977, "scr_metric_threshold_10": 0.200574020361154, "scr_dir2_threshold_10": 0.200574020361154,
      "scr_dir1_threshold_20": 0.17918096391815555, "scr_metric_threshold_20": 0.24989248100762018, "scr_dir2_threshold_20": 0.24989248100762018,
      "scr_dir1_threshold_50": 0.21441364058306378, "scr_metric_threshold_50": 0.190181892650874, "scr_dir2_threshold_50": 0.190181892650874,
      "scr_dir1_threshold_100": 0.07548573264654815, "scr_metric_threshold_100": 0.2203149891228706, "scr_dir2_threshold_100": 0.2203149891228706,
      "scr_dir1_threshold_500": -0.32550871553246685, "scr_metric_threshold_500": -0.08731195917358915, "scr_dir2_threshold_500": -0.08731195917358915
    }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.35294138271494124, "scr_metric_threshold_2": 0.025352120716357125, "scr_dir2_threshold_2": 0.025352120716357125,
      "scr_dir1_threshold_5": 0.5058826251837225, "scr_metric_threshold_5": 0.0676055992768194, "scr_dir2_threshold_5": 0.0676055992768194,
      "scr_dir1_threshold_10": 0.5882351703710352, "scr_metric_threshold_10": 0.10704221284666396, "scr_dir2_threshold_10": 0.10704221284666396,
      "scr_dir1_threshold_20": 0.5647060720977459, "scr_metric_threshold_20": 0.2028167978304582, "scr_dir2_threshold_20": 0.2028167978304582,
      "scr_dir1_threshold_50": 0.5999997195076798, "scr_metric_threshold_50": 0.008450594971853177, "scr_dir2_threshold_50": 0.008450594971853177,
      "scr_dir1_threshold_100": -0.14117599210133627, "scr_metric_threshold_100": 0.0788732271396891, "scr_dir2_threshold_100": 0.0788732271396891,
      "scr_dir1_threshold_500": -0.117646893828047, "scr_metric_threshold_500": 0.08169009213030683, "scr_dir2_threshold_500": 0.08169009213030683 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.6065574891783352, "scr_metric_threshold_2": 0.12779558617164727, "scr_dir2_threshold_2": 0.12779558617164727,
      "scr_dir1_threshold_5": 0.6065574891783352, "scr_metric_threshold_5": 0.19488832128005631, "scr_dir2_threshold_5": 0.19488832128005631,
      "scr_dir1_threshold_10": 0.6803280210277406, "scr_metric_threshold_10": 0.297124752131342, "scr_dir2_threshold_10": 0.297124752131342,
      "scr_dir1_threshold_20": 0.6803280210277406, "scr_metric_threshold_20": 0.3514377191494094, "scr_dir2_threshold_20": 0.3514377191494094,
      "scr_dir1_threshold_50": 0.737704829931308, "scr_metric_threshold_50": -0.028754002127866617, "scr_dir2_threshold_50": -0.028754002127866617,
      "scr_dir1_threshold_100": 0.4918033828084541, "scr_metric_threshold_100": -0.003194846807505005, "scr_dir2_threshold_100": -0.003194846807505005,
      "scr_dir1_threshold_500": 0.016393234383091822, "scr_metric_threshold_500": -0.20447267127241053, "scr_dir2_threshold_500": -0.20447267127241053 },
    { "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.2988507952080613, "scr_metric_threshold_2": 0.052631597292903495, "scr_dir2_threshold_2": 0.052631597292903495,
      "scr_dir1_threshold_5": 0.4252873011979847, "scr_metric_threshold_5": 0.0906433258136208, "scr_dir2_threshold_5": 0.0906433258136208,
      "scr_dir1_threshold_10": 0.5402303890469973, "scr_metric_threshold_10": 0.13157899323225872,
|
158 |
+
"scr_dir2_threshold_10": 0.13157899323225872,
|
159 |
+
"scr_dir1_threshold_20": 0.5402303890469973,
|
160 |
+
"scr_metric_threshold_20": 0.16374266967455167,
|
161 |
+
"scr_dir2_threshold_20": 0.16374266967455167,
|
162 |
+
"scr_dir1_threshold_50": 0.5517244922986699,
|
163 |
+
"scr_metric_threshold_50": 0.26900586426035866,
|
164 |
+
"scr_dir2_threshold_50": 0.26900586426035866,
|
165 |
+
"scr_dir1_threshold_100": 0.25287369709060864,
|
166 |
+
"scr_metric_threshold_100": 0.32748533934910334,
|
167 |
+
"scr_dir2_threshold_100": 0.32748533934910334,
|
168 |
+
"scr_dir1_threshold_500": -0.8505746023959694,
|
169 |
+
"scr_metric_threshold_500": -0.023391859748531148,
|
170 |
+
"scr_dir2_threshold_500": -0.023391859748531148
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": 0.0062499208375646435,
|
175 |
+
"scr_metric_threshold_2": 0.10332101210875932,
|
176 |
+
"scr_dir2_threshold_2": 0.10332101210875932,
|
177 |
+
"scr_dir1_threshold_5": 0.09999985098835698,
|
178 |
+
"scr_metric_threshold_5": 0.13284136412363806,
|
179 |
+
"scr_dir2_threshold_5": 0.13284136412363806,
|
180 |
+
"scr_dir1_threshold_10": 0.09374993015079233,
|
181 |
+
"scr_metric_threshold_10": 0.21402222219289643,
|
182 |
+
"scr_dir2_threshold_10": 0.21402222219289643,
|
183 |
+
"scr_dir1_threshold_20": -0.20000007450582152,
|
184 |
+
"scr_metric_threshold_20": 0.21402222219289643,
|
185 |
+
"scr_dir2_threshold_20": 0.21402222219289643,
|
186 |
+
"scr_dir1_threshold_50": -0.08750000931322768,
|
187 |
+
"scr_metric_threshold_50": 0.37269372937206624,
|
188 |
+
"scr_dir2_threshold_50": 0.37269372937206624,
|
189 |
+
"scr_dir1_threshold_100": 0.10624977182592162,
|
190 |
+
"scr_metric_threshold_100": 0.5719557755575233,
|
191 |
+
"scr_dir2_threshold_100": 0.5719557755575233,
|
192 |
+
"scr_dir1_threshold_500": 0.3687497997656047,
|
193 |
+
"scr_metric_threshold_500": -0.14022134215569965,
|
194 |
+
"scr_dir2_threshold_500": -0.14022134215569965
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": -1.1073823328690307,
|
199 |
+
"scr_metric_threshold_2": 0.3507108629875822,
|
200 |
+
"scr_dir2_threshold_2": 0.3507108629875822,
|
201 |
+
"scr_dir1_threshold_5": -1.1140936536675246,
|
202 |
+
"scr_metric_threshold_5": 0.3080568238411771,
|
203 |
+
"scr_dir2_threshold_5": 0.3080568238411771,
|
204 |
+
"scr_dir1_threshold_10": -1.0536909664189942,
|
205 |
+
"scr_metric_threshold_10": 0.3080568238411771,
|
206 |
+
"scr_dir2_threshold_10": 0.3080568238411771,
|
207 |
+
"scr_dir1_threshold_20": -0.9328855919219331,
|
208 |
+
"scr_metric_threshold_20": 0.2796208919057185,
|
209 |
+
"scr_dir2_threshold_20": 0.2796208919057185,
|
210 |
+
"scr_dir1_threshold_50": -0.8523485422468784,
|
211 |
+
"scr_metric_threshold_50": 0.10426546041192737,
|
212 |
+
"scr_dir2_threshold_50": 0.10426546041192737,
|
213 |
+
"scr_dir1_threshold_100": -0.771811892602866,
|
214 |
+
"scr_metric_threshold_100": 0.056872146357351556,
|
215 |
+
"scr_dir2_threshold_100": 0.056872146357351556,
|
216 |
+
"scr_dir1_threshold_500": -2.0335562039614272,
|
217 |
+
"scr_metric_threshold_500": -0.3696682451066994,
|
218 |
+
"scr_dir2_threshold_500": -0.3696682451066994
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.057971127166931,
|
223 |
+
"scr_metric_threshold_2": 0.08490568690131776,
|
224 |
+
"scr_dir2_threshold_2": 0.08490568690131776,
|
225 |
+
"scr_dir1_threshold_5": 0.23188407675004405,
|
226 |
+
"scr_metric_threshold_5": 0.10849056869013178,
|
227 |
+
"scr_dir2_threshold_5": 0.10849056869013178,
|
228 |
+
"scr_dir1_threshold_10": 0.2681159232499559,
|
229 |
+
"scr_metric_threshold_10": 0.21698113738026356,
|
230 |
+
"scr_dir2_threshold_10": 0.21698113738026356,
|
231 |
+
"scr_dir1_threshold_20": 0.3188403355827606,
|
232 |
+
"scr_metric_threshold_20": 0.2924527028736747,
|
233 |
+
"scr_dir2_threshold_20": 0.2924527028736747,
|
234 |
+
"scr_dir1_threshold_50": 0.007246282916446389,
|
235 |
+
"scr_metric_threshold_50": 0.3915094313098682,
|
236 |
+
"scr_dir2_threshold_50": 0.3915094313098682,
|
237 |
+
"scr_dir1_threshold_100": -0.04347812941635828,
|
238 |
+
"scr_metric_threshold_100": 0.4009432715638065,
|
239 |
+
"scr_dir2_threshold_100": 0.4009432715638065,
|
240 |
+
"scr_dir1_threshold_500": -0.014492997750572723,
|
241 |
+
"scr_metric_threshold_500": 0.32075450478945783,
|
242 |
+
"scr_dir2_threshold_500": 0.32075450478945783
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.11811009209619154,
|
247 |
+
"scr_metric_threshold_2": 0.08032120438718889,
|
248 |
+
"scr_dir2_threshold_2": 0.08032120438718889,
|
249 |
+
"scr_dir1_threshold_5": 0.2204723596440055,
|
250 |
+
"scr_metric_threshold_5": 0.14457835939779684,
|
251 |
+
"scr_dir2_threshold_5": 0.14457835939779684,
|
252 |
+
"scr_dir1_threshold_10": 0.2283462719181943,
|
253 |
+
"scr_metric_threshold_10": 0.16867467283873927,
|
254 |
+
"scr_dir2_threshold_10": 0.16867467283873927,
|
255 |
+
"scr_dir1_threshold_20": 0.32283462719181943,
|
256 |
+
"scr_metric_threshold_20": 0.31726904458068134,
|
257 |
+
"scr_dir2_threshold_20": 0.31726904458068134,
|
258 |
+
"scr_dir1_threshold_50": 0.5039369561370944,
|
259 |
+
"scr_metric_threshold_50": 0.2931727311397389,
|
260 |
+
"scr_dir2_threshold_50": 0.2931727311397389,
|
261 |
+
"scr_dir1_threshold_100": 0.4488191008899863,
|
262 |
+
"scr_metric_threshold_100": 0.2851404670753774,
|
263 |
+
"scr_dir2_threshold_100": 0.2851404670753774,
|
264 |
+
"scr_dir1_threshold_500": 0.35433074561636113,
|
265 |
+
"scr_metric_threshold_500": 0.09236948079569564,
|
266 |
+
"scr_dir2_threshold_500": 0.09236948079569564
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": 0.006060527244557103,
|
271 |
+
"scr_metric_threshold_2": 0.06666679912142758,
|
272 |
+
"scr_dir2_threshold_2": 0.06666679912142758,
|
273 |
+
"scr_dir1_threshold_5": 0.060605994926019806,
|
274 |
+
"scr_metric_threshold_5": 0.13888908205208186,
|
275 |
+
"scr_dir2_threshold_5": 0.13888908205208186,
|
276 |
+
"scr_dir1_threshold_10": 0.12727251709659673,
|
277 |
+
"scr_metric_threshold_10": 0.16111134842589106,
|
278 |
+
"scr_dir2_threshold_10": 0.16111134842589106,
|
279 |
+
"scr_dir1_threshold_20": 0.13939393282593532,
|
280 |
+
"scr_metric_threshold_20": 0.17777779985357126,
|
281 |
+
"scr_dir2_threshold_20": 0.17777779985357126,
|
282 |
+
"scr_dir1_threshold_50": 0.2545453954334178,
|
283 |
+
"scr_metric_threshold_50": 0.11111133186904595,
|
284 |
+
"scr_dir2_threshold_50": 0.11111133186904595,
|
285 |
+
"scr_dir1_threshold_100": 0.26060592267797494,
|
286 |
+
"scr_metric_threshold_100": 0.04444453274761838,
|
287 |
+
"scr_dir2_threshold_100": 0.04444453274761838,
|
288 |
+
"scr_dir1_threshold_500": -0.32727280608877624,
|
289 |
+
"scr_metric_threshold_500": -0.4555556328208327,
|
290 |
+
"scr_dir2_threshold_500": -0.4555556328208327
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_0",
|
296 |
+
"sae_lens_version": "5.3.1",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "topk",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_1_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 16,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": true,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "9e07b9e1-9527-42ad-b0f2-0645106d4d5d",
  "datetime_epoch_millis": 1737045451978,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.3195701480451186,
      "scr_metric_threshold_2": 0.12214803165621196,
      "scr_dir2_threshold_2": 0.12214803165621196,
      "scr_dir1_threshold_5": 0.3830316285390672,
      "scr_metric_threshold_5": 0.18813069075850533,
      "scr_dir2_threshold_5": 0.18813069075850533,
      "scr_dir1_threshold_10": 0.33622403896913927,
      "scr_metric_threshold_10": 0.23185058844234674,
      "scr_dir2_threshold_10": 0.23185058844234674,
      "scr_dir1_threshold_20": 0.3624905011206335,
      "scr_metric_threshold_20": 0.2974850994392395,
      "scr_dir2_threshold_20": 0.2974850994392395,
      "scr_dir1_threshold_50": 0.3251303548377636,
      "scr_metric_threshold_50": 0.3346019314337155,
      "scr_dir2_threshold_50": 0.3346019314337155,
      "scr_dir1_threshold_100": 0.06338198476307814,
      "scr_metric_threshold_100": 0.2542753108053761,
      "scr_dir2_threshold_100": 0.2542753108053761,
      "scr_dir1_threshold_500": -0.1675317627684643,
      "scr_metric_threshold_500": -0.14146994643656693,
      "scr_dir2_threshold_500": -0.14146994643656693
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5764706212343906,
      "scr_metric_threshold_2": 0.008450594971853177,
      "scr_dir2_threshold_2": 0.008450594971853177,
      "scr_dir1_threshold_5": 0.6117649698751249,
      "scr_metric_threshold_5": 0.030985850697592576,
      "scr_dir2_threshold_5": 0.030985850697592576,
      "scr_dir1_threshold_10": 0.5529415229611013,
      "scr_metric_threshold_10": 0.11549297571891592,
      "scr_dir2_threshold_10": 0.11549297571891592,
      "scr_dir1_threshold_20": 0.635294068148414,
      "scr_metric_threshold_20": 0.2112675607027102,
      "scr_dir2_threshold_20": 0.2112675607027102,
      "scr_dir1_threshold_50": 0.6117649698751249,
      "scr_metric_threshold_50": 0.03661974857922683,
      "scr_dir2_threshold_50": 0.03661974857922683,
      "scr_dir1_threshold_100": 0.2823533866642731,
      "scr_metric_threshold_100": 0.06197170139518515,
      "scr_dir2_threshold_100": 0.06197170139518515,
      "scr_dir1_threshold_500": 0.25882358716018355,
      "scr_metric_threshold_500": 0.05915483640456743,
      "scr_dir2_threshold_500": 0.05915483640456743
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.754098552877146,
      "scr_metric_threshold_2": 0.13738031702432307,
      "scr_dir2_threshold_2": 0.13738031702432307,
      "scr_dir1_threshold_5": 0.7131149783566704,
      "scr_metric_threshold_5": 0.26517571276580953,
      "scr_dir2_threshold_5": 0.26517571276580953,
      "scr_dir1_threshold_10": 0.737704829931308,
      "scr_metric_threshold_10": 0.3450480255343994,
      "scr_dir2_threshold_10": 0.3450480255343994,
      "scr_dir1_threshold_20": 0.7459019356856001,
      "scr_metric_threshold_20": 0.43769972553300923,
      "scr_dir2_threshold_20": 0.43769972553300923,
      "scr_dir1_threshold_50": 0.7049183611651244,
      "scr_metric_threshold_50": 0.6485622808505906,
      "scr_dir2_threshold_50": 0.6485622808505906,
      "scr_dir1_threshold_100": 0.762295170068692,
      "scr_metric_threshold_100": -0.025558964890200826,
      "scr_dir2_threshold_100": -0.025558964890200826,
      "scr_dir1_threshold_500": 0.09836087198678926,
      "scr_metric_threshold_500": -0.3290734106365528,
      "scr_dir2_threshold_500": -0.3290734106365528
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5057473941812173,
      "scr_metric_threshold_2": 0.07309934386093087,
      "scr_dir2_threshold_2": 0.07309934386093087,
      "scr_dir1_threshold_5": 0.4482761928120919,
      "scr_metric_threshold_5": 0.096491203609462,
      "scr_dir2_threshold_5": 0.096491203609462,
      "scr_dir1_threshold_10": 0.4827591876778719,
      "scr_metric_threshold_10": 0.13157899323225872,
      "scr_dir2_threshold_10": 0.13157899323225872,
      "scr_dir1_threshold_20": 0.49425329092954456,
      "scr_metric_threshold_20": 0.17836253844673786,
      "scr_dir2_threshold_20": 0.17836253844673786,
      "scr_dir1_threshold_50": 0.5287356006845627,
      "scr_metric_threshold_50": 0.3099415316789966,
      "scr_dir2_threshold_50": 0.3099415316789966,
      "scr_dir1_threshold_100": 0.3563219965771866,
      "scr_metric_threshold_100": 0.3479532601997139,
      "scr_dir2_threshold_100": 0.3479532601997139,
      "scr_dir1_threshold_500": -0.06896530462079804,
      "scr_metric_threshold_500": -0.1871345294230828,
      "scr_dir2_threshold_500": -0.1871345294230828
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.3687497997656047,
      "scr_metric_threshold_2": 0.10332101210875932,
      "scr_dir2_threshold_2": 0.10332101210875932,
      "scr_dir1_threshold_5": 0.39374985564497084,
      "scr_metric_threshold_5": 0.14391155111504664,
      "scr_dir2_threshold_5": 0.14391155111504664,
      "scr_dir1_threshold_10": 0.4062500698492077,
      "scr_metric_threshold_10": 0.24354257420777517,
      "scr_dir2_threshold_10": 0.24354257420777517,
      "scr_dir1_threshold_20": 0.31875006053598,
      "scr_metric_threshold_20": 0.45018459842529385,
      "scr_dir2_threshold_20": 0.45018459842529385,
      "scr_dir1_threshold_50": -0.06250032596296912,
      "scr_metric_threshold_50": 0.45387458744132464,
      "scr_dir2_threshold_50": 0.45387458744132464,
      "scr_dir1_threshold_100": -0.34375011641534614,
      "scr_metric_threshold_100": 0.6273062706279338,
      "scr_dir2_threshold_100": 0.6273062706279338,
      "scr_dir1_threshold_500": 0.062499953433861555,
      "scr_metric_threshold_500": -0.10701100112479012,
      "scr_dir2_threshold_500": -0.10701100112479012
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.07382572887656082,
      "scr_metric_threshold_2": 0.3981044595285923,
      "scr_dir2_threshold_2": 0.3981044595285923,
      "scr_dir1_threshold_5": 0.16107369931906715,
      "scr_metric_threshold_5": 0.4644551557022853,
      "scr_dir2_threshold_5": 0.4644551557022853,
      "scr_dir1_threshold_10": -0.4026844483131891,
      "scr_metric_threshold_10": 0.3933649021339873,
      "scr_dir2_threshold_10": 0.3933649021339873,
      "scr_dir1_threshold_20": -0.3825500858866648,
      "scr_metric_threshold_20": 0.3507108629875822,
      "scr_dir2_threshold_20": 0.3507108629875822,
      "scr_dir1_threshold_50": -0.13422801609404889,
      "scr_metric_threshold_50": 0.265402784694772,
      "scr_dir2_threshold_50": 0.265402784694772,
      "scr_dir1_threshold_100": -1.3557044026616465,
      "scr_metric_threshold_100": 0.19431281361290828,
      "scr_dir2_threshold_100": 0.19431281361290828,
      "scr_dir1_threshold_500": -1.7852341341688116,
      "scr_metric_threshold_500": -0.398104177042158,
      "scr_dir2_threshold_500": -0.398104177042158
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.11594182241618206,
      "scr_metric_threshold_2": 0.08018848562038029,
      "scr_dir2_threshold_2": 0.08018848562038029,
      "scr_dir1_threshold_5": 0.3043477697498678,
      "scr_metric_threshold_5": 0.14150929073288404,
      "scr_dir2_threshold_5": 0.14150929073288404,
      "scr_dir1_threshold_10": 0.4202895921660499,
      "scr_metric_threshold_10": 0.23113189776117096,
      "scr_dir2_threshold_10": 0.23113189776117096,
      "scr_dir1_threshold_20": 0.4855070022494273,
      "scr_metric_threshold_20": 0.3773583897749925,
      "scr_dir2_threshold_20": 0.3773583897749925,
      "scr_dir1_threshold_50": 0.3550721820826725,
      "scr_metric_threshold_50": 0.5047169201269691,
      "scr_dir2_threshold_50": 0.5047169201269691,
      "scr_dir1_threshold_100": 0.04347812941635828,
      "scr_metric_threshold_100": 0.4056601916907756,
      "scr_dir2_threshold_100": 0.4056601916907756,
      "scr_dir1_threshold_500": 0.03623184649991189,
      "scr_metric_threshold_500": 0.28301886261973647,
      "scr_dir2_threshold_500": 0.28301886261973647
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.14960621052073325,
      "scr_metric_threshold_2": 0.10441775720420239,
      "scr_dir2_threshold_2": 0.10441775720420239,
      "scr_dir1_threshold_5": 0.32283462719181943,
      "scr_metric_threshold_5": 0.18473896159139128,
      "scr_dir2_threshold_5": 0.18473896159139128,
      "scr_dir1_threshold_10": 0.32283462719181943,
      "scr_metric_threshold_10": 0.21686753909669523,
      "scr_dir2_threshold_10": 0.21686753909669523,
      "scr_dir1_threshold_20": 0.43307080701382217,
      "scr_metric_threshold_20": 0.2409638525376377,
      "scr_dir2_threshold_20": 0.2409638525376377,
      "scr_dir1_threshold_50": 0.4881891315887168,
      "scr_metric_threshold_50": 0.3132530322365361,
      "scr_dir2_threshold_50": 0.3132530322365361,
      "scr_dir1_threshold_100": 0.5984253114107195,
      "scr_metric_threshold_100": 0.3614458984944921,
      "scr_dir2_threshold_100": 0.3614458984944921,
      "scr_dir1_threshold_500": 0.2519684780685472,
      "scr_metric_threshold_500": 0.18072294924724605,
      "scr_dir2_threshold_500": 0.18072294924724605
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.012121054489114207,
      "scr_metric_threshold_2": 0.0722222829306543,
      "scr_dir2_threshold_2": 0.0722222829306543,
      "scr_dir1_threshold_5": 0.10909093536292541,
      "scr_metric_threshold_5": 0.17777779985357126,
      "scr_dir2_threshold_5": 0.17777779985357126,
      "scr_dir1_threshold_10": 0.16969693028894522,
      "scr_metric_threshold_10": 0.17777779985357126,
      "scr_dir2_threshold_10": 0.17777779985357126,
      "scr_dir1_threshold_20": 0.16969693028894522,
      "scr_metric_threshold_20": 0.1333332671059529,
      "scr_dir2_threshold_20": 0.1333332671059529,
      "scr_dir1_threshold_50": 0.10909093536292541,
      "scr_metric_threshold_50": 0.1444445658613086,
      "scr_dir2_threshold_50": 0.1444445658613086,
      "scr_dir1_threshold_100": 0.16363640304438812,
      "scr_metric_threshold_100": 0.06111131531220084,
      "scr_dir2_threshold_100": 0.06111131531220084,
      "scr_dir1_threshold_500": -0.19393940050739802,
      "scr_metric_threshold_500": -0.6333331015375018,
      "scr_dir2_threshold_500": -0.6333331015375018
    }
  ],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_1",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_2_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 16,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": true,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "4b23d805-2776-4085-b9f9-cb2ed32658eb",
  "datetime_epoch_millis": 1737045598474,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.368718184945681,
      "scr_metric_threshold_2": 0.1242216518234524,
      "scr_dir2_threshold_2": 0.1242216518234524,
      "scr_dir1_threshold_5": 0.4345766938695985,
      "scr_metric_threshold_5": 0.17583554950868052,
      "scr_dir2_threshold_5": 0.17583554950868052,
      "scr_dir1_threshold_10": 0.4431259576911283,
      "scr_metric_threshold_10": 0.24629120211674305,
      "scr_dir2_threshold_10": 0.24629120211674305,
      "scr_dir1_threshold_20": 0.3975099646820329,
      "scr_metric_threshold_20": 0.33239503899520695,
      "scr_dir2_threshold_20": 0.33239503899520695,
      "scr_dir1_threshold_50": 0.42429301244866086,
      "scr_metric_threshold_50": 0.41622870638816833,
      "scr_dir2_threshold_50": 0.41622870638816833,
      "scr_dir1_threshold_100": 0.30859433239010153,
      "scr_metric_threshold_100": 0.4681670084491676,
      "scr_dir2_threshold_100": 0.4681670084491676,
      "scr_dir1_threshold_500": -0.23903495235059144,
      "scr_metric_threshold_500": -0.08479129439612641,
      "scr_dir2_threshold_500": -0.08479129439612641
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.6117649698751249,
      "scr_metric_threshold_2": 0.014084492853487425,
      "scr_dir2_threshold_2": 0.014084492853487425,
      "scr_dir1_threshold_5": 0.635294068148414,
      "scr_metric_threshold_5": 0.025352120716357125,
      "scr_dir2_threshold_5": 0.025352120716357125,
      "scr_dir1_threshold_10": 0.5882351703710352,
      "scr_metric_threshold_10": 0.05915483640456743,
      "scr_dir2_threshold_10": 0.05915483640456743,
      "scr_dir1_threshold_20": 0.7411764128398165,
      "scr_metric_threshold_20": 0.16056331926999592,
      "scr_dir2_threshold_20": 0.16056331926999592,
      "scr_dir1_threshold_50": 0.7058820641990823,
      "scr_metric_threshold_50": 0.0,
      "scr_dir2_threshold_50": 0.0,
      "scr_dir1_threshold_100": 0.5764706212343906,
      "scr_metric_threshold_100": 0.016901357844105153,
      "scr_dir2_threshold_100": 0.016901357844105153,
      "scr_dir1_threshold_500": 0.08235324641811308,
      "scr_metric_threshold_500": 0.05915483640456743,
      "scr_dir2_threshold_500": 0.05915483640456743
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.8196724675350056,
      "scr_metric_threshold_2": 0.10223643085128566,
      "scr_dir2_threshold_2": 0.10223643085128566,
      "scr_dir1_threshold_5": 0.8032787445891676,
      "scr_metric_threshold_5": 0.1501598946845039,
      "scr_dir2_threshold_5": 0.1501598946845039,
      "scr_dir1_threshold_10": 0.8360657019180974,
      "scr_metric_threshold_10": 0.23961674787560874,
      "scr_dir2_threshold_10": 0.23961674787560874,
      "scr_dir1_threshold_20": 0.8442623191096432,
      "scr_metric_threshold_20": 0.3290736010667136,
      "scr_dir2_threshold_20": 0.3290736010667136,
      "scr_dir1_threshold_50": 0.8032787445891676,
      "scr_metric_threshold_50": 0.4664537276608759,
      "scr_dir2_threshold_50": 0.4664537276608759,
      "scr_dir1_threshold_100": 0.7868855102060758,
      "scr_metric_threshold_100": 0.5846645829798474,
      "scr_dir2_threshold_100": 0.5846645829798474,
      "scr_dir1_threshold_500": -1.3114758503434596,
      "scr_metric_threshold_500": -0.4952075393585817,
      "scr_dir2_threshold_500": -0.4952075393585817
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.6091956936677952,
      "scr_metric_threshold_2": 0.11403501127956878,
      "scr_dir2_threshold_2": 0.11403501127956878,
      "scr_dir1_threshold_5": 0.620689796919468,
      "scr_metric_threshold_5": 0.10526319458580699,
      "scr_dir2_threshold_5": 0.10526319458580699,
      "scr_dir1_threshold_10": 0.4482761928120919,
      "scr_metric_threshold_10": 0.16959072175297604,
      "scr_dir2_threshold_10": 0.16959072175297604,
      "scr_dir1_threshold_20": 0.49425329092954456,
      "scr_metric_threshold_20": 0.2076022759911102,
      "scr_dir2_threshold_20": 0.2076022759911102,
      "scr_dir1_threshold_50": 0.6321839001711407,
      "scr_metric_threshold_50": 0.3596491900739795,
      "scr_dir2_threshold_50": 0.3596491900739795,
      "scr_dir1_threshold_100": 0.5517244922986699,
      "scr_metric_threshold_100": 0.4444444638091759,
      "scr_dir2_threshold_100": 0.4444444638091759,
      "scr_dir1_threshold_500": 0.1954024957214833,
      "scr_metric_threshold_500": -0.26315798646451743,
      "scr_dir2_threshold_500": -0.26315798646451743
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.4062500698492077,
      "scr_metric_threshold_2": 0.1143911991001679,
      "scr_dir2_threshold_2": 0.1143911991001679,
      "scr_dir1_threshold_5": 0.35000003725291073,
      "scr_metric_threshold_5": 0.154981518163139,
      "scr_dir2_threshold_5": 0.154981518163139,
      "scr_dir1_threshold_10": 0.35000003725291073,
      "scr_metric_threshold_10": 0.3062730473102472,
      "scr_dir2_threshold_10": 0.3062730473102472,
      "scr_dir1_threshold_20": -0.01875013504180149,
      "scr_metric_threshold_20": 0.4649447744327332,
      "scr_dir2_threshold_20": 0.4649447744327332,
      "scr_dir1_threshold_50": -0.41250036321587985,
      "scr_metric_threshold_50": 0.5682657865414925,
      "scr_dir2_threshold_50": 0.5682657865414925,
      "scr_dir1_threshold_100": -0.35625033061958294,
      "scr_metric_threshold_100": 0.6383764576193424,
      "scr_dir2_threshold_100": 0.6383764576193424,
      "scr_dir1_threshold_500": -0.8625002514571476,
      "scr_metric_threshold_500": -0.2619925192879291,
      "scr_dir2_threshold_500": -0.2619925192879291
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.08724837047354875,
      "scr_metric_threshold_2": 0.402843734436763,
      "scr_dir2_threshold_2": 0.402843734436763,
      "scr_dir1_threshold_5": 0.22147638656759763,
      "scr_metric_threshold_5": 0.4597155983076803,
      "scr_dir2_threshold_5": 0.4597155983076803,
      "scr_dir1_threshold_10": 0.26845643221914023,
      "scr_metric_threshold_10": 0.45023704849133883,
      "scr_dir2_threshold_10": 0.45023704849133883,
      "scr_dir1_threshold_20": -0.05369096641899412,
      "scr_metric_threshold_20": 0.511848469756861,
      "scr_dir2_threshold_20": 0.511848469756861,
      "scr_dir1_threshold_50": 0.08053704967505479,
      "scr_metric_threshold_50": 0.5450236766004904,
      "scr_dir2_threshold_50": 0.5450236766004904,
      "scr_dir1_threshold_100": -0.7315431677498174,
      "scr_metric_threshold_100": 0.5260662944813733,
      "scr_dir2_threshold_100": 0.5260662944813733,
      "scr_dir1_threshold_500": -0.536912464407238,
      "scr_metric_threshold_500": 0.07582952847646873,
      "scr_dir2_threshold_500": 0.07582952847646873
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.18115923249955945,
      "scr_metric_threshold_2": 0.08962260702828691,
      "scr_dir2_threshold_2": 0.08962260702828691,
      "scr_dir1_threshold_5": 0.3188403355827606,
      "scr_metric_threshold_5": 0.16509417252169806,
      "scr_dir2_threshold_5": 0.16509417252169806,
      "scr_dir1_threshold_10": 0.4275363070001762,
      "scr_metric_threshold_10": 0.3113206645355196,
      "scr_dir2_threshold_10": 0.3113206645355196,
      "scr_dir1_threshold_20": 0.3985507434167107,
      "scr_metric_threshold_20": 0.4481130351414345,
      "scr_dir2_threshold_20": 0.4481130351414345,
      "scr_dir1_threshold_50": 0.5289855635834655,
      "scr_metric_threshold_50": 0.5613208051125037,
      "scr_dir2_threshold_50": 0.5613208051125037,
      "scr_dir1_threshold_100": 0.47826071933298087,
      "scr_metric_threshold_100": 0.6415092907328841,
      "scr_dir2_threshold_100": 0.6415092907328841,
      "scr_dir1_threshold_500": 0.07246369299982378,
      "scr_metric_threshold_500": 0.46226407667631025,
      "scr_dir2_threshold_500": 0.46226407667631025
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.14960621052073325,
      "scr_metric_threshold_2": 0.08433745610740517,
      "scr_dir2_threshold_2": 0.08433745610740517,
      "scr_dir1_threshold_5": 0.39370077631509165,
      "scr_metric_threshold_5": 0.1405623470536516,
      "scr_dir2_threshold_5": 0.1405623470536516,
      "scr_dir1_threshold_10": 0.37795295176671406,
      "scr_metric_threshold_10": 0.20080325034404325,
      "scr_dir2_threshold_10": 0.20080325034404325,
      "scr_dir1_threshold_20": 0.440944719288011,
      "scr_metric_threshold_20": 0.3092370198923909,
      "scr_dir2_threshold_20": 0.3092370198923909,
      "scr_dir1_threshold_50": 0.6377953421094501,
      "scr_metric_threshold_50": 0.4457831152258262,
      "scr_dir2_threshold_50": 0.4457831152258262,
      "scr_dir1_threshold_100": 0.7086614912327223,
      "scr_metric_threshold_100": 0.5100402702364342,
      "scr_dir2_threshold_100": 0.5100402702364342,
      "scr_dir1_threshold_500": 0.15748059212270857,
      "scr_metric_threshold_500": 0.3614458984944921,
      "scr_dir2_threshold_500": 0.3614458984944921
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.08484846514447261,
      "scr_metric_threshold_2": 0.0722222829306543,
      "scr_dir2_threshold_2": 0.0722222829306543,
      "scr_dir1_threshold_5": 0.1333334055813782,
      "scr_metric_threshold_5": 0.2055555500366072,
      "scr_dir2_threshold_5": 0.2055555500366072,
      "scr_dir1_threshold_10": 0.24848486818886073,
      "scr_metric_threshold_10": 0.2333333002196431,
      "scr_dir2_threshold_10": 0.2333333002196431,
      "scr_dir1_threshold_20": 0.3333333333333333,
      "scr_metric_threshold_20": 0.22777781641041636,
      "scr_dir2_threshold_20": 0.22777781641041636,
      "scr_dir1_threshold_50": 0.41818179847780595,
      "scr_metric_threshold_50": 0.3833333498901784,
      "scr_dir2_threshold_50": 0.3833333498901784,
      "scr_dir1_threshold_100": 0.45454532318537294,
      "scr_metric_threshold_100": 0.3833333498901784,
      "scr_dir2_threshold_100": 0.3833333498901784,
      "scr_dir1_threshold_500": 0.29090892014098485,
      "scr_metric_threshold_500": -0.6166666501098216,
      "scr_dir2_threshold_500": -0.6166666501098216
    }
  ],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_2",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_3_eval_results.json
ADDED
@@ -0,0 +1,323 @@
{
  "eval_type_id": "scr",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
    "perform_scr": true,
    "early_stopping_patience": 20,
    "train_set_size": 4000,
    "test_set_size": 1000,
    "context_length": 128,
    "probe_train_batch_size": 16,
    "probe_test_batch_size": 500,
    "probe_epochs": 20,
    "probe_lr": 0.001,
    "probe_l1_penalty": 0.001,
    "sae_batch_size": 16,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "lower_vram_usage": true,
    "model_name": "pythia-160m-deduped",
    "n_values": [2, 5, 10, 20, 50, 100, 500],
    "column1_vals_lookup": {
      "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
      "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
    }
  },
  "eval_id": "a2ec45ed-87e4-48d2-8ef1-e59a45b8d416",
  "datetime_epoch_millis": 1737045743023,
  "eval_result_metrics": {
    "scr_metrics": {
      "scr_dir1_threshold_2": 0.3960652574447106,
      "scr_metric_threshold_2": 0.09669608038138713,
      "scr_dir2_threshold_2": 0.09669608038138713,
      "scr_dir1_threshold_5": 0.36320464958181004,
      "scr_metric_threshold_5": 0.19501181790230362,
      "scr_dir2_threshold_5": 0.19501181790230362,
      "scr_dir1_threshold_10": 0.28852216789878316,
      "scr_metric_threshold_10": 0.25318735428643924,
      "scr_dir2_threshold_10": 0.25318735428643924,
      "scr_dir1_threshold_20": 0.38340203063978046,
      "scr_metric_threshold_20": 0.2697533792120724,
      "scr_dir2_threshold_20": 0.2697533792120724,
      "scr_dir1_threshold_50": 0.26410971745627176,
      "scr_metric_threshold_50": 0.29109665682459734,
      "scr_dir2_threshold_50": 0.29109665682459734,
      "scr_dir1_threshold_100": 0.21193268025033246,
      "scr_metric_threshold_100": 0.22667797951162474,
      "scr_dir2_threshold_100": 0.22667797951162474,
      "scr_dir1_threshold_500": -0.4297970907960992,
      "scr_metric_threshold_500": -0.13249547259462813,
      "scr_dir2_threshold_500": -0.13249547259462813
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
      "scr_dir1_threshold_2": 0.5882351703710352,
      "scr_metric_threshold_2": -0.002817032891016523,
      "scr_dir2_threshold_2": -0.002817032891016523,
      "scr_dir1_threshold_5": 0.6588231664217034,
      "scr_metric_threshold_5": 0.04788720854169773,
      "scr_dir2_threshold_5": 0.04788720854169773,
      "scr_dir1_threshold_10": 0.6941175150624376,
      "scr_metric_threshold_10": 0.0676055992768194,
      "scr_dir2_threshold_10": 0.0676055992768194,
      "scr_dir1_threshold_20": 0.8470587575312188,
      "scr_metric_threshold_20": 0.13239433356302108,
      "scr_dir2_threshold_20": 0.13239433356302108,
      "scr_dir1_threshold_50": 0.5764706212343906,
      "scr_metric_threshold_50": 0.3577463871192187,
      "scr_dir2_threshold_50": 0.3577463871192187,
      "scr_dir1_threshold_100": 0.6117649698751249,
      "scr_metric_threshold_100": 0.0676055992768194,
      "scr_dir2_threshold_100": 0.0676055992768194,
      "scr_dir1_threshold_500": 0.45882372740634364,
      "scr_metric_threshold_500": -0.05915500430496622,
      "scr_dir2_threshold_500": -0.05915500430496622
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
      "scr_dir1_threshold_2": 0.8114753617807134,
      "scr_metric_threshold_2": 0.09904158404378066,
      "scr_dir2_threshold_2": 0.09904158404378066,
      "scr_dir1_threshold_5": 0.7868855102060758,
      "scr_metric_threshold_5": 0.20766770851007632,
      "scr_dir2_threshold_5": 0.20766770851007632,
      "scr_dir1_threshold_10": 0.762295170068692,
      "scr_metric_threshold_10": 0.29073486808617116,
      "scr_dir2_threshold_10": 0.29073486808617116,
      "scr_dir1_threshold_20": 0.8360657019180974,
      "scr_metric_threshold_20": 0.41214057021264766,
      "scr_dir2_threshold_20": 0.41214057021264766,
      "scr_dir1_threshold_50": 0.737704829931308,
      "scr_metric_threshold_50": 0.36741214361709523,
      "scr_dir2_threshold_50": 0.36741214361709523,
      "scr_dir1_threshold_100": 0.7950821273976216,
      "scr_metric_threshold_100": 0.08306715957609484,
      "scr_dir2_threshold_100": 0.08306715957609484,
      "scr_dir1_threshold_500": -0.6803280210277406,
      "scr_metric_threshold_500": -0.23642171063794296,
      "scr_dir2_threshold_500": -0.23642171063794296
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
      "scr_dir1_threshold_2": 0.5977015904161226,
      "scr_metric_threshold_2": 0.08771921263311704,
      "scr_dir2_threshold_2": 0.08771921263311704,
      "scr_dir1_threshold_5": 0.5747126988020154,
      "scr_metric_threshold_5": 0.12573094115383435,
      "scr_dir2_threshold_5": 0.12573094115383435,
      "scr_dir1_threshold_10": 0.5402303890469973,
      "scr_metric_threshold_10": 0.16666660857247229,
      "scr_dir2_threshold_10": 0.16666660857247229,
      "scr_dir1_threshold_20": 0.620689796919468,
      "scr_metric_threshold_20": 0.23099413573964134,
      "scr_dir2_threshold_20": 0.23099413573964134,
      "scr_dir1_threshold_50": 0.2873566919563886,
      "scr_metric_threshold_50": 0.3625731289719001,
      "scr_dir2_threshold_50": 0.3625731289719001,
      "scr_dir1_threshold_100": 0.2988507952080613,
      "scr_metric_threshold_100": 0.45029234160501713,
      "scr_dir2_threshold_100": 0.45029234160501713,
      "scr_dir1_threshold_500": -0.1149424027382507,
      "scr_metric_threshold_500": 0.08187133483727584,
      "scr_dir2_threshold_500": 0.08187133483727584
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
      "scr_dir1_threshold_2": 0.4625001024455046,
      "scr_metric_threshold_2": 0.055350495070410455,
      "scr_dir2_threshold_2": 0.055350495070410455,
      "scr_dir1_threshold_5": 0.36249987892804003,
      "scr_metric_threshold_5": 0.09594103407669775,
      "scr_dir2_threshold_5": 0.09594103407669775,
      "scr_dir1_threshold_10": -0.20000007450582152,
      "scr_metric_threshold_10": 0.16605170515454756,
      "scr_dir2_threshold_10": 0.16605170515454756,
      "scr_dir1_threshold_20": -0.006250293366672205,
      "scr_metric_threshold_20": 0.154981518163139,
      "scr_dir2_threshold_20": 0.154981518163139,
      "scr_dir1_threshold_50": -0.08750000931322768,
      "scr_metric_threshold_50": 0.0036899890160307885,
      "scr_dir2_threshold_50": 0.0036899890160307885,
      "scr_dir1_threshold_100": -0.4687503958121768,
      "scr_metric_threshold_100": -0.3210332233176866,
      "scr_dir2_threshold_100": -0.3210332233176866,
      "scr_dir1_threshold_500": -1.7250005029142952,
      "scr_metric_threshold_500": -0.7822877887910727,
      "scr_dir2_threshold_500": -0.7822877887910727
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
      "scr_dir1_threshold_2": 0.16778542014860354,
      "scr_metric_threshold_2": 0.33649303826307003,
      "scr_dir2_threshold_2": 0.33649303826307003,
      "scr_dir1_threshold_5": 0.1744967409470975,
      "scr_metric_threshold_5": 0.3933649021339873,
      "scr_dir2_threshold_5": 0.3933649021339873,
      "scr_dir1_threshold_10": 0.16778542014860354,
      "scr_metric_threshold_10": 0.4265403914640509,
      "scr_dir2_threshold_10": 0.4265403914640509,
      "scr_dir1_threshold_20": 0.18120806174559145,
      "scr_metric_threshold_20": 0.1753554314937911,
      "scr_dir2_threshold_20": 0.1753554314937911,
      "scr_dir1_threshold_50": -0.18120806174559145,
      "scr_metric_threshold_50": 0.45023704849133883,
      "scr_dir2_threshold_50": 0.45023704849133883,
      "scr_dir1_threshold_100": -0.0939596912720427,
      "scr_metric_threshold_100": 0.9004740969826777,
      "scr_dir2_threshold_100": 0.9004740969826777,
      "scr_dir1_threshold_500": 0.4362414523367013,
      "scr_metric_threshold_500": 0.6398105871960764,
      "scr_dir2_threshold_500": 0.6398105871960764
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
      "scr_dir1_threshold_2": 0.23913035966649043,
      "scr_metric_threshold_2": 0.10849056869013178,
      "scr_dir2_threshold_2": 0.10849056869013178,
      "scr_dir1_threshold_5": 0.05072441233280467,
      "scr_metric_threshold_5": 0.22169805750723268,
      "scr_dir2_threshold_5": 0.22169805750723268,
      "scr_dir1_threshold_10": 0.13043482016675478,
      "scr_metric_threshold_10": 0.33018862619736444,
      "scr_dir2_threshold_10": 0.33018862619736444,
      "scr_dir1_threshold_20": 0.3405796162497797,
      "scr_metric_threshold_20": 0.45283023642237197,
      "scr_dir2_threshold_20": 0.45283023642237197,
      "scr_dir1_threshold_50": 0.4275363070001762,
      "scr_metric_threshold_50": 0.3349055463243336,
      "scr_dir2_threshold_50": 0.3349055463243336,
      "scr_dir1_threshold_100": 0.04347812941635828,
      "scr_metric_threshold_100": 0.2028300958453878,
      "scr_dir2_threshold_100": 0.2028300958453878,
      "scr_dir1_threshold_500": 0.3333333333333333,
      "scr_metric_threshold_500": -0.08490568690131776,
      "scr_dir2_threshold_500": -0.08490568690131776
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
      "scr_dir1_threshold_2": 0.20472453509562788,
      "scr_metric_threshold_2": 0.10040174486005715,
      "scr_dir2_threshold_2": 0.10040174486005715,
      "scr_dir1_threshold_5": 0.13385838597235566,
      "scr_metric_threshold_5": 0.22891557612913094,
      "scr_dir2_threshold_5": 0.22891557612913094,
      "scr_dir1_threshold_10": 0.03149611842454171,
      "scr_metric_threshold_10": 0.2610441536344349,
      "scr_dir2_threshold_10": 0.2610441536344349,
      "scr_dir1_threshold_20": 0.02362220615035291,
      "scr_metric_threshold_20": 0.27710844238708693,
      "scr_dir2_threshold_20": 0.27710844238708693,
      "scr_dir1_threshold_50": 0.34645683334217237,
      "scr_metric_threshold_50": 0.38554221193543453,
      "scr_dir2_threshold_50": 0.38554221193543453,
      "scr_dir1_threshold_100": 0.15748059212270857,
      "scr_metric_threshold_100": 0.6024097510321298,
      "scr_dir2_threshold_100": 0.6024097510321298,
      "scr_dir1_threshold_500": -1.2677163026169247,
      "scr_metric_threshold_500": -0.052208878602101196,
      "scr_dir2_threshold_500": -0.052208878602101196
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
      "scr_dir1_threshold_2": 0.09696951963358681,
      "scr_metric_threshold_2": -0.011110967618453465,
      "scr_dir2_threshold_2": -0.011110967618453465,
      "scr_dir1_threshold_5": 0.16363640304438812,
      "scr_metric_threshold_5": 0.2388891151657721,
      "scr_dir2_threshold_5": 0.2388891151657721,
      "scr_dir1_threshold_10": 0.18181798477805944,
      "scr_metric_threshold_10": 0.3166668819056531,
      "scr_dir2_threshold_10": 0.3166668819056531,
      "scr_dir1_threshold_20": 0.22424239797040793,
      "scr_metric_threshold_20": 0.3222223657148799,
      "scr_dir2_threshold_20": 0.3222223657148799,
      "scr_dir1_threshold_50": 0.006060527244557103,
      "scr_metric_threshold_50": 0.06666679912142758,
      "scr_dir2_threshold_50": 0.06666679912142758,
      "scr_dir1_threshold_100": 0.35151491506700466,
      "scr_metric_threshold_100": -0.17222198490744228,
      "scr_dir2_threshold_100": -0.17222198490744228,
      "scr_dir1_threshold_500": -0.8787880101479604,
      "scr_metric_threshold_500": -0.5666666335529764,
      "scr_dir2_threshold_500": -0.5666666335529764
    }
  ],
  "sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_3",
  "sae_lens_version": "5.3.1",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "topk",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [null],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_4_eval_results.json
ADDED
@@ -0,0 +1,323 @@
1 |
+
{
|
2 |
+
"eval_type_id": "scr",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"canrager/amazon_reviews_mcauley_1and5"
|
8 |
+
],
|
9 |
+
"perform_scr": true,
|
10 |
+
"early_stopping_patience": 20,
|
11 |
+
"train_set_size": 4000,
|
12 |
+
"test_set_size": 1000,
|
13 |
+
"context_length": 128,
|
14 |
+
"probe_train_batch_size": 16,
|
15 |
+
"probe_test_batch_size": 500,
|
16 |
+
"probe_epochs": 20,
|
17 |
+
"probe_lr": 0.001,
|
18 |
+
"probe_l1_penalty": 0.001,
|
19 |
+
"sae_batch_size": 16,
|
20 |
+
"llm_batch_size": 256,
|
21 |
+
"llm_dtype": "float32",
|
22 |
+
"lower_vram_usage": true,
|
23 |
+
"model_name": "pythia-160m-deduped",
|
24 |
+
"n_values": [
|
25 |
+
2,
|
26 |
+
5,
|
27 |
+
10,
|
28 |
+
20,
|
29 |
+
50,
|
30 |
+
100,
|
31 |
+
500
|
32 |
+
],
|
33 |
+
"column1_vals_lookup": {
|
34 |
+
"LabHC/bias_in_bios_class_set1": [
|
35 |
+
[
|
36 |
+
"professor",
|
37 |
+
"nurse"
|
38 |
+
],
|
39 |
+
[
|
40 |
+
"architect",
|
41 |
+
"journalist"
|
42 |
+
],
|
43 |
+
[
|
44 |
+
"surgeon",
|
45 |
+
"psychologist"
|
46 |
+
],
|
47 |
+
[
|
48 |
+
"attorney",
|
49 |
+
"teacher"
|
50 |
+
]
|
51 |
+
],
|
52 |
+
"canrager/amazon_reviews_mcauley_1and5": [
|
53 |
+
[
|
54 |
+
"Books",
|
55 |
+
"CDs_and_Vinyl"
|
56 |
+
],
|
57 |
+
[
|
58 |
+
"Software",
|
59 |
+
"Electronics"
|
60 |
+
],
|
61 |
+
[
|
62 |
+
"Pet_Supplies",
|
63 |
+
"Office_Products"
|
64 |
+
],
|
65 |
+
[
|
66 |
+
"Industrial_and_Scientific",
|
67 |
+
"Toys_and_Games"
|
68 |
+
]
|
69 |
+
]
|
70 |
+
}
|
71 |
+
},
|
72 |
+
"eval_id": "7e846f6e-6b9d-4fb9-a313-429a6a646e55",
|
73 |
+
"datetime_epoch_millis": 1737045888870,
|
74 |
+
"eval_result_metrics": {
|
75 |
+
"scr_metrics": {
|
76 |
+
"scr_dir1_threshold_2": 0.4200155877593478,
|
77 |
+
"scr_metric_threshold_2": 0.10334738738183133,
|
78 |
+
"scr_dir2_threshold_2": 0.10334738738183133,
|
79 |
+
"scr_dir1_threshold_5": 0.0008559658569487656,
|
80 |
+
"scr_metric_threshold_5": 0.23284744316844658,
|
81 |
+
"scr_dir2_threshold_5": 0.23284744316844658,
|
82 |
+
"scr_dir1_threshold_10": -0.08509781808758993,
|
83 |
+
"scr_metric_threshold_10": 0.1733167603818313,
|
84 |
+
"scr_dir2_threshold_10": 0.1733167603818313,
|
85 |
+
"scr_dir1_threshold_20": 0.01086589429029737,
|
86 |
+
"scr_metric_threshold_20": 0.05803775914593163,
|
87 |
+
"scr_dir2_threshold_20": 0.05803775914593163,
|
88 |
+
"scr_dir1_threshold_50": -0.473909613274783,
|
89 |
+
"scr_metric_threshold_50": -0.013566211924211452,
|
90 |
+
"scr_dir2_threshold_50": -0.013566211924211452,
|
91 |
+
"scr_dir1_threshold_100": -1.3025312053395062,
|
92 |
+
"scr_metric_threshold_100": -0.20895109790070582,
|
93 |
+
"scr_dir2_threshold_100": -0.20895109790070582,
|
94 |
+
"scr_dir1_threshold_500": -0.8449806314105095,
|
95 |
+
"scr_metric_threshold_500": -0.25329263473722236,
|
96 |
+
"scr_dir2_threshold_500": -0.25329263473722236
|
97 |
+
}
|
98 |
+
},
|
99 |
+
"eval_result_details": [
|
100 |
+
{
|
101 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
|
102 |
+
"scr_dir1_threshold_2": 0.6235295190117695,
|
103 |
+
"scr_metric_threshold_2": -0.005633897881634249,
|
104 |
+
"scr_dir2_threshold_2": -0.005633897881634249,
|
105 |
+
"scr_dir1_threshold_5": -1.0235290982732892,
|
106 |
+
"scr_metric_threshold_5": 0.033802715688210305,
|
107 |
+
"scr_dir2_threshold_5": 0.033802715688210305,
|
108 |
+
"scr_dir1_threshold_10": -0.3294115832108517,
|
109 |
+
"scr_metric_threshold_10": 0.05070424143271425,
|
110 |
+
"scr_dir2_threshold_10": 0.05070424143271425,
|
111 |
+
"scr_dir1_threshold_20": 0.1529412424687812,
|
112 |
+
"scr_metric_threshold_20": 0.08450695712092456,
|
113 |
+
"scr_dir2_threshold_20": 0.08450695712092456,
|
114 |
+
"scr_dir1_threshold_50": -2.258822184698583,
|
115 |
+
"scr_metric_threshold_50": -0.10704238074706275,
|
116 |
+
"scr_dir2_threshold_50": -0.10704238074706275,
|
117 |
+
"scr_dir1_threshold_100": -4.282351282971872,
|
118 |
+
"scr_metric_threshold_100": -0.15211272429814277,
|
119 |
+
"scr_dir2_threshold_100": -0.15211272429814277,
|
120 |
+
"scr_dir1_threshold_500": -0.49411737481627754,
|
121 |
+
"scr_metric_threshold_500": 0.10422534785604623,
|
122 |
+
"scr_dir2_threshold_500": 0.10422534785604623
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
|
126 |
+
"scr_dir1_threshold_2": 0.8032787445891676,
|
127 |
+
"scr_metric_threshold_2": 0.07667746596108484,
|
128 |
+
"scr_dir2_threshold_2": 0.07667746596108484,
|
129 |
+
"scr_dir1_threshold_5": 0.4508198082879784,
|
130 |
+
"scr_metric_threshold_5": 0.1469648574468381,
|
131 |
+
"scr_dir2_threshold_5": 0.1469648574468381,
|
132 |
+
"scr_dir1_threshold_10": 0.516393722945838,
|
133 |
+
"scr_metric_threshold_10": 0.23003201702293294,
|
134 |
+
"scr_dir2_threshold_10": 0.23003201702293294,
|
135 |
+
"scr_dir1_threshold_20": 0.4918033828084541,
|
136 |
+
"scr_metric_threshold_20": 0.3642172968095902,
|
137 |
+
"scr_dir2_threshold_20": 0.3642172968095902,
|
138 |
+
"scr_dir1_threshold_50": -0.06557391465785951,
|
139 |
+
"scr_metric_threshold_50": 0.5463258499993049,
|
140 |
+
"scr_dir2_threshold_50": 0.5463258499993049,
|
141 |
+
"scr_dir1_threshold_100": -1.524590340137384,
|
142 |
+
"scr_metric_threshold_100": -0.3386581414892286,
|
143 |
+
"scr_dir2_threshold_100": -0.3386581414892286,
|
144 |
+
"scr_dir1_threshold_500": -1.9098367223302488,
|
145 |
+
"scr_metric_threshold_500": -0.5111819638262676,
|
146 |
+
"scr_dir2_threshold_500": -0.5111819638262676
|
147 |
+
},
|
148 |
+
{
|
149 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
|
150 |
+
"scr_dir1_threshold_2": 0.586206802053688,
|
151 |
+
"scr_metric_threshold_2": 0.096491203609462,
|
152 |
+
"scr_dir2_threshold_2": 0.096491203609462,
|
153 |
+
"scr_dir1_threshold_5": -0.6551721066744861,
|
154 |
+
"scr_metric_threshold_5": 0.1520467398002861,
|
155 |
+
"scr_dir2_threshold_5": 0.1520467398002861,
|
156 |
+
"scr_dir1_threshold_10": -0.03448230975501809,
|
157 |
+
"scr_metric_threshold_10": 0.16959072175297604,
|
158 |
+
"scr_dir2_threshold_10": 0.16959072175297604,
|
159 |
+
"scr_dir1_threshold_20": -0.4942526058187827,
|
160 |
+
"scr_metric_threshold_20": 0.23684201353548254,
|
161 |
+
"scr_dir2_threshold_20": 0.23684201353548254,
|
162 |
+
"scr_dir1_threshold_50": -1.9080458037650947,
|
163 |
+
"scr_metric_threshold_50": 0.3625731289719001,
|
164 |
+
"scr_dir2_threshold_50": 0.3625731289719001,
|
165 |
+
"scr_dir1_threshold_100": -4.068964619510036,
|
166 |
+
"scr_metric_threshold_100": 0.21637426696745518,
|
167 |
+
"scr_dir2_threshold_100": 0.21637426696745518,
|
168 |
+
"scr_dir1_threshold_500": -1.6091950085570335,
|
169 |
+
"scr_metric_threshold_500": -0.37134511994824504,
|
170 |
+
"scr_dir2_threshold_500": -0.37134511994824504
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": 0.4249998323619016,
|
175 |
+
"scr_metric_threshold_2": 0.09225104506066696,
|
176 |
+
"scr_dir2_threshold_2": 0.09225104506066696,
|
177 |
+
"scr_dir1_threshold_5": 0.5500001117587323,
|
178 |
+
"scr_metric_threshold_5": 0.14760154013107743,
|
179 |
+
"scr_dir2_threshold_5": 0.14760154013107743,
|
180 |
+
"scr_dir1_threshold_10": -0.5312503492460383,
|
181 |
+
"scr_metric_threshold_10": -0.3505533553892491,
|
182 |
+
"scr_dir2_threshold_10": -0.3505533553892491,
|
183 |
+
"scr_dir1_threshold_20": -0.18125031199312758,
|
184 |
+
"scr_metric_threshold_20": -0.33210319036577896,
|
185 |
+
"scr_dir2_threshold_20": -0.33210319036577896,
|
186 |
+
"scr_dir1_threshold_50": 0.08750000931322768,
|
187 |
+
"scr_metric_threshold_50": -0.04428030807900188,
|
188 |
+
"scr_dir2_threshold_50": -0.04428030807900188,
|
189 |
+
"scr_dir1_threshold_100": 0.18124993946402002,
|
190 |
+
"scr_metric_threshold_100": 0.2361623762323974,
|
191 |
+
"scr_dir2_threshold_100": 0.2361623762323974,
|
192 |
+
"scr_dir1_threshold_500": -1.4062504423783153,
|
193 |
+
"scr_metric_threshold_500": -0.4354242024745383,
|
194 |
+
"scr_dir2_threshold_500": -0.4354242024745383
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": 0.1543623785205732,
|
199 |
+
"scr_metric_threshold_2": 0.2606635097866013,
|
200 |
+
"scr_dir2_threshold_2": 0.2606635097866013,
|
201 |
+
"scr_dir1_threshold_5": 0.1946311033736218,
|
202 |
+
"scr_metric_threshold_5": 0.535545126784149,
|
203 |
+
"scr_dir2_threshold_5": 0.535545126784149,
|
204 |
+
"scr_dir1_threshold_10": -0.10067101207053668,
|
205 |
+
"scr_metric_threshold_10": 0.6872039012506522,
|
206 |
+
"scr_dir2_threshold_10": 0.6872039012506522,
|
207 |
+
"scr_dir1_threshold_20": 0.3758387650881708,
|
208 |
+
"scr_metric_threshold_20": 0.7914693616625795,
|
209 |
+
"scr_dir2_threshold_20": 0.7914693616625795,
|
210 |
+
"scr_dir1_threshold_50": 0.3288591194676707,
|
211 |
+
"scr_metric_threshold_50": 0.8436019506253261,
|
212 |
+
"scr_dir2_threshold_50": 0.8436019506253261,
|
213 |
+
"scr_dir1_threshold_100": -0.29530171541311606,
|
214 |
+
"scr_metric_threshold_100": 0.9146919217071898,
|
215 |
+
"scr_dir2_threshold_100": 0.9146919217071898,
|
216 |
+
"scr_dir1_threshold_500": -0.771811892602866,
|
217 |
+
"scr_metric_threshold_500": 0.014218107210946462,
|
218 |
+
"scr_dir2_threshold_500": 0.014218107210946462
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.32608705041688696,
|
223 |
+
"scr_metric_threshold_2": 0.1556603322677598,
|
224 |
+
"scr_dir2_threshold_2": 0.1556603322677598,
|
225 |
+
"scr_dir1_threshold_5": 0.3333333333333333,
|
226 |
+
"scr_metric_threshold_5": 0.5047169201269691,
|
227 |
+
"scr_dir2_threshold_5": 0.5047169201269691,
|
228 |
+
"scr_dir1_threshold_10": 0.4057970263331571,
|
229 |
+
"scr_metric_threshold_10": 0.35377350798617846,
|
230 |
+
"scr_dir2_threshold_10": 0.35377350798617846,
|
231 |
+
"scr_dir1_threshold_20": 0.36231889691679886,
|
232 |
+
"scr_metric_threshold_20": 0.10377364856316265,
|
233 |
+
"scr_dir2_threshold_20": 0.10377364856316265,
|
234 |
+
"scr_dir1_threshold_50": -0.18115966441723938,
|
235 |
+
"scr_metric_threshold_50": 0.08962260702828691,
|
236 |
+
"scr_dir2_threshold_50": 0.08962260702828691,
|
237 |
+
"scr_dir1_threshold_100": -0.6304348201667548,
|
238 |
+
"scr_metric_threshold_100": -0.7311321789151393,
|
239 |
+
"scr_dir2_threshold_100": -0.7311321789151393,
|
240 |
+
"scr_dir1_threshold_500": 0.557971127166931,
|
241 |
+
"scr_metric_threshold_500": -0.7688681022388291,
|
242 |
+
"scr_dir2_threshold_500": -0.7688681022388291
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.259842390342736,
|
247 |
+
"scr_metric_threshold_2": 0.05622489094624643,
|
248 |
+
"scr_dir2_threshold_2": 0.05622489094624643,
|
249 |
+
"scr_dir1_threshold_5": 0.33858292106798354,
|
250 |
+
"scr_metric_threshold_5": 0.1365460953334353,
|
251 |
+
"scr_dir2_threshold_5": 0.1365460953334353,
|
252 |
+
"scr_dir1_threshold_10": -0.18897624121946377,
|
253 |
+
"scr_metric_threshold_10": 0.11244978189249286,
|
254 |
+
"scr_dir2_threshold_10": 0.11244978189249286,
|
255 |
+
"scr_dir1_threshold_20": -0.2204723596440055,
|
256 |
+
"scr_metric_threshold_20": -0.4899597297635659,
|
257 |
+
"scr_dir2_threshold_20": -0.4899597297635659,
|
258 |
+
"scr_dir1_threshold_50": 0.15748059212270857,
|
259 |
+
"scr_metric_threshold_50": -0.6104417757204202,
|
260 |
+
"scr_dir2_threshold_50": -0.6104417757204202,
|
261 |
+
"scr_dir1_threshold_100": -0.23622018419238308,
|
262 |
+
"scr_metric_threshold_100": -0.6224900521289269,
|
263 |
+
"scr_dir2_threshold_100": -0.6224900521289269,
|
264 |
+
"scr_dir1_threshold_500": -0.10236226754781394,
|
265 |
+
"scr_metric_threshold_500": 0.7309238216772747,
|
266 |
+
"scr_dir2_threshold_500": 0.7309238216772747
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": 0.18181798477805944,
|
271 |
+
"scr_metric_threshold_2": 0.0944445493044635,
|
272 |
+
"scr_dir2_threshold_2": 0.0944445493044635,
|
273 |
+
"scr_dir1_threshold_5": -0.1818183460182838,
|
274 |
+
"scr_metric_threshold_5": 0.2055555500366072,
|
275 |
+
"scr_dir2_threshold_5": 0.2055555500366072,
|
276 |
+
"scr_dir1_threshold_10": -0.41818179847780595,
|
277 |
+
"scr_metric_threshold_10": 0.1333332671059529,
|
278 |
+
"scr_dir2_threshold_10": 0.1333332671059529,
|
279 |
+
"scr_dir1_threshold_20": -0.39999985550391026,
|
280 |
+
"scr_metric_threshold_20": -0.2944442843949417,
|
281 |
+
"scr_dir2_threshold_20": -0.2944442843949417,
|
282 |
+
"scr_dir1_threshold_50": 0.0484849404369056,
|
283 |
+
"scr_metric_threshold_50": -1.1888887674720248,
|
284 |
+
"scr_dir2_threshold_50": -1.1888887674720248,
|
285 |
+
"scr_dir1_threshold_100": 0.43636338021147725,
|
286 |
+
"scr_metric_threshold_100": -1.1944442512812514,
|
287 |
+
"scr_dir2_threshold_100": -1.1944442512812514,
|
288 |
+
"scr_dir1_threshold_500": -1.0242424702184527,
|
289 |
+
"scr_metric_threshold_500": -0.788888966154166,
|
290 |
+
"scr_dir2_threshold_500": -0.788888966154166
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_4",
|
296 |
+
"sae_lens_version": "5.3.1",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "topk",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
scr/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_TopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_5_eval_results.json
ADDED
@@ -0,0 +1,323 @@
1 |
+
{
|
2 |
+
"eval_type_id": "scr",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"canrager/amazon_reviews_mcauley_1and5"
|
8 |
+
],
|
9 |
+
"perform_scr": true,
|
10 |
+
"early_stopping_patience": 20,
|
11 |
+
"train_set_size": 4000,
|
12 |
+
"test_set_size": 1000,
|
13 |
+
"context_length": 128,
|
14 |
+
"probe_train_batch_size": 16,
|
15 |
+
"probe_test_batch_size": 500,
|
16 |
+
"probe_epochs": 20,
|
17 |
+
"probe_lr": 0.001,
|
18 |
+
"probe_l1_penalty": 0.001,
|
19 |
+
"sae_batch_size": 16,
|
20 |
+
"llm_batch_size": 256,
|
21 |
+
"llm_dtype": "float32",
|
22 |
+
"lower_vram_usage": true,
|
23 |
+
"model_name": "pythia-160m-deduped",
|
24 |
+
"n_values": [
|
25 |
+
2,
|
26 |
+
5,
|
27 |
+
10,
|
28 |
+
20,
|
29 |
+
50,
|
30 |
+
100,
|
31 |
+
500
|
32 |
+
],
|
33 |
+
"column1_vals_lookup": {
|
34 |
+
"LabHC/bias_in_bios_class_set1": [
|
35 |
+
[
|
36 |
+
"professor",
|
37 |
+
"nurse"
|
38 |
+
],
|
39 |
+
[
|
40 |
+
"architect",
|
41 |
+
"journalist"
|
42 |
+
],
|
43 |
+
[
|
44 |
+
"surgeon",
|
45 |
+
"psychologist"
|
46 |
+
],
|
47 |
+
[
|
48 |
+
"attorney",
|
49 |
+
"teacher"
|
50 |
+
]
|
51 |
+
],
|
52 |
+
"canrager/amazon_reviews_mcauley_1and5": [
|
53 |
+
[
|
54 |
+
"Books",
|
55 |
+
"CDs_and_Vinyl"
|
56 |
+
],
|
57 |
+
[
|
58 |
+
"Software",
|
59 |
+
"Electronics"
|
60 |
+
],
|
61 |
+
[
|
62 |
+
"Pet_Supplies",
|
63 |
+
"Office_Products"
|
64 |
+
],
|
65 |
+
[
|
66 |
+
"Industrial_and_Scientific",
|
67 |
+
"Toys_and_Games"
|
68 |
+
]
|
69 |
+
]
|
70 |
+
}
|
71 |
+
},
|
72 |
+
"eval_id": "d98511ab-6144-4d41-8e61-2e2e43f03205",
|
73 |
+
"datetime_epoch_millis": 1737046034030,
|
74 |
+
"eval_result_metrics": {
|
75 |
+
"scr_metrics": {
|
76 |
+
"scr_dir1_threshold_2": 0.02540106531327355,
|
77 |
+
"scr_metric_threshold_2": 0.04185308349134417,
|
78 |
+
"scr_dir2_threshold_2": 0.04185308349134417,
|
79 |
+
"scr_dir1_threshold_5": 0.18409064071977613,
|
80 |
+
"scr_metric_threshold_5": -0.03440268626635638,
|
81 |
+
"scr_dir2_threshold_5": -0.03440268626635638,
|
82 |
+
"scr_dir1_threshold_10": 0.2151496093242513,
|
83 |
+
"scr_metric_threshold_10": -0.0016078499108019666,
|
84 |
+
"scr_dir2_threshold_10": -0.0016078499108019666,
|
85 |
+
"scr_dir1_threshold_20": -0.14857902621425384,
|
86 |
+
"scr_metric_threshold_20": -0.0833807983659085,
|
87 |
+
"scr_dir2_threshold_20": -0.0833807983659085,
|
88 |
+
"scr_dir1_threshold_50": -0.6617985459738736,
|
89 |
+
"scr_metric_threshold_50": -0.10629422024769372,
|
90 |
+
"scr_dir2_threshold_50": -0.10629422024769372,
|
91 |
+
"scr_dir1_threshold_100": -1.654621977850247,
|
92 |
+
"scr_metric_threshold_100": -0.2558246118636499,
|
93 |
+
"scr_dir2_threshold_100": -0.2558246118636499,
|
94 |
+
"scr_dir1_threshold_500": -2.3769948473144957,
|
95 |
+
"scr_metric_threshold_500": -0.511700109726995,
|
96 |
+
"scr_dir2_threshold_500": -0.511700109726995
|
97 |
+
}
|
98 |
+
},
|
99 |
+
"eval_result_details": [
|
100 |
+
{
|
101 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
|
102 |
+
"scr_dir1_threshold_2": 0.08235324641811308,
|
103 |
+
"scr_metric_threshold_2": 0.0,
|
104 |
+
"scr_dir2_threshold_2": 0.0,
|
105 |
+
"scr_dir1_threshold_5": 0.4823528256796329,
|
106 |
+
"scr_metric_threshold_5": 0.008450594971853177,
|
107 |
+
"scr_dir2_threshold_5": 0.008450594971853177,
|
108 |
+
"scr_dir1_threshold_10": 0.14117669333213656,
|
109 |
+
"scr_metric_threshold_10": 0.12112670570015138,
|
110 |
+
"scr_dir2_threshold_10": 0.12112670570015138,
|
111 |
+
"scr_dir1_threshold_20": -0.18823488987871512,
|
112 |
+
"scr_metric_threshold_20": 0.21408442569332792,
|
113 |
+
"scr_dir2_threshold_20": 0.21408442569332792,
|
114 |
+
"scr_dir1_threshold_50": 0.35294138271494124,
|
115 |
+
"scr_metric_threshold_50": 0.28169002497014733,
|
116 |
+
"scr_dir2_threshold_50": 0.28169002497014733,
|
117 |
+
"scr_dir1_threshold_100": -4.2235278360578485,
|
118 |
+
"scr_metric_threshold_100": 0.48732385569162207,
|
119 |
+
"scr_dir2_threshold_100": 0.48732385569162207,
|
120 |
+
"scr_dir1_threshold_500": -4.035292244948334,
|
121 |
+
"scr_metric_threshold_500": -0.1126761107282982,
|
122 |
+
"scr_dir2_threshold_500": -0.1126761107282982
|
123 |
+
},
|
124 |
+
{
|
125 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
|
126 |
+
"scr_dir1_threshold_2": 0.16393429808190266,
|
127 |
+
"scr_metric_threshold_2": -0.02236411808269582,
|
128 |
+
"scr_dir2_threshold_2": -0.02236411808269582,
|
129 |
+
"scr_dir1_threshold_5": 0.5327869573289298,
|
130 |
+
"scr_metric_threshold_5": 0.03514388617303741,
|
131 |
+
"scr_dir2_threshold_5": 0.03514388617303741,
|
132 |
+
"scr_dir1_threshold_10": 0.5655739146578596,
|
133 |
+
"scr_metric_threshold_10": 0.07987231276858985,
|
134 |
+
"scr_dir2_threshold_10": 0.07987231276858985,
|
135 |
+
"scr_dir1_threshold_20": 0.7049183611651244,
|
136 |
+
"scr_metric_threshold_20": 0.054313157448228226,
|
137 |
+
"scr_dir2_threshold_20": 0.054313157448228226,
|
138 |
+
"scr_dir1_threshold_50": -1.6147545949326272,
|
139 |
+
"scr_metric_threshold_50": 0.3226837170215428,
|
140 |
+
"scr_dir2_threshold_50": 0.3226837170215428,
|
141 |
+
"scr_dir1_threshold_100": -1.9590169140422704,
|
142 |
+
"scr_metric_threshold_100": 0.3322684478742186,
|
143 |
+
"scr_dir2_threshold_100": 0.3322684478742186,
|
144 |
+
"scr_dir1_threshold_500": -1.6393444465072649,
|
145 |
+
"scr_metric_threshold_500": 0.009584730852675801,
|
146 |
+
"scr_dir2_threshold_500": 0.009584730852675801
|
147 |
+
},
|
148 |
+
{
|
149 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
|
150 |
+
"scr_dir1_threshold_2": -0.13793060924159609,
|
151 |
+
"scr_metric_threshold_2": 0.04385960631655852,
|
152 |
+
"scr_dir2_threshold_2": 0.04385960631655852,
|
153 |
+
"scr_dir1_threshold_5": 0.33333379007384123,
|
154 |
+
"scr_metric_threshold_5": 0.05555553619082409,
|
155 |
+
"scr_dir2_threshold_5": 0.05555553619082409,
|
156 |
+
"scr_dir1_threshold_10": 0.4252873011979847,
|
157 |
+
"scr_metric_threshold_10": 0.1520467398002861,
|
158 |
+
"scr_dir2_threshold_10": 0.1520467398002861,
|
159 |
+
"scr_dir1_threshold_20": -1.3908043063322046,
|
160 |
+
"scr_metric_threshold_20": 0.18128647734465844,
|
161 |
+
"scr_dir2_threshold_20": 0.18128647734465844,
|
162 |
+
"scr_dir1_threshold_50": -2.206895913862394,
|
163 |
+
"scr_metric_threshold_50": 0.0029239388979205995,
|
164 |
+
"scr_dir2_threshold_50": 0.0029239388979205995,
|
165 |
+
"scr_dir1_threshold_100": -3.034482309755018,
|
166 |
+
"scr_metric_threshold_100": 0.4181286651627242,
|
167 |
+
"scr_dir2_threshold_100": 0.4181286651627242,
|
168 |
+
"scr_dir1_threshold_500": -4.218390017114067,
|
169 |
+
"scr_metric_threshold_500": -0.21345032806953457,
|
170 |
+
"scr_dir2_threshold_500": -0.21345032806953457
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
|
174 |
+
"scr_dir1_threshold_2": -0.15625025611376145,
|
175 |
+
"scr_metric_threshold_2": 0.02214037398281715,
|
176 |
+
"scr_dir2_threshold_2": 0.02214037398281715,
|
177 |
+
"scr_dir1_threshold_5": 0.2749998696148124,
|
178 |
+
"scr_metric_threshold_5": 0.06273069304578824,
|
179 |
+
"scr_dir2_threshold_5": 0.06273069304578824,
|
180 |
+
"scr_dir1_threshold_10": 0.05625003259629691,
|
181 |
+
"scr_metric_threshold_10": 0.08118085806925839,
|
182 |
+
"scr_dir2_threshold_10": 0.08118085806925839,
|
183 |
+
"scr_dir1_threshold_20": -1.0437505634502753,
|
184 |
+
"scr_metric_threshold_20": 0.2361623762323974,
|
185 |
+
"scr_dir2_threshold_20": 0.2361623762323974,
|
186 |
+
"scr_dir1_threshold_50": -1.7375007171185322,
|
187 |
+
"scr_metric_threshold_50": 0.4243542354264459,
|
188 |
+
"scr_dir2_threshold_50": 0.4243542354264459,
|
189 |
+
"scr_dir1_threshold_100": -1.7250005029142952,
|
190 |
+
"scr_metric_threshold_100": -0.7896677668231343,
|
191 |
+
"scr_dir2_threshold_100": -0.7896677668231343,
|
192 |
+
"scr_dir1_threshold_500": -1.2125002887100584,
|
193 |
+
"scr_metric_threshold_500": -0.7601474148082557,
|
194 |
+
"scr_dir2_threshold_500": -0.7601474148082557
|
195 |
+
},
|
196 |
+
{
|
197 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
|
198 |
+
"scr_dir1_threshold_2": -0.0067113207984939634,
|
199 |
+
"scr_metric_threshold_2": 0.10900473532009809,
|
200 |
+
"scr_dir2_threshold_2": 0.10900473532009809,
|
201 |
+
"scr_dir1_threshold_5": -0.10738233286903064,
|
202 |
+
"scr_metric_threshold_5": 0.08530807829281019,
|
203 |
+
"scr_dir2_threshold_5": 0.08530807829281019,
|
204 |
+
"scr_dir1_threshold_10": 0.06711400804702444,
|
205 |
+
"scr_metric_threshold_10": -0.18483398131013254,
|
206 |
+
"scr_dir2_threshold_10": -0.18483398131013254,
|
207 |
+
"scr_dir1_threshold_20": 0.13422841612509132,
|
208 |
+
"scr_metric_threshold_20": -0.43601894128039237,
|
209 |
+
"scr_dir2_threshold_20": -0.43601894128039237,
|
210 |
+
"scr_dir1_threshold_50": 0.40939576911168307,
|
211 |
+
"scr_metric_threshold_50": -0.6540284119205886,
|
212 |
+
"scr_dir2_threshold_50": -0.6540284119205886,
|
213 |
+
"scr_dir1_threshold_100": 0.36912744428967686,
|
214 |
+
"scr_metric_threshold_100": -0.6540284119205886,
|
215 |
+
"scr_dir2_threshold_100": -0.6540284119205886,
|
216 |
+
"scr_dir1_threshold_500": -2.194630303311537,
|
217 |
+
"scr_metric_threshold_500": -0.5829381583522906,
|
218 |
+
"scr_dir2_threshold_500": -0.5829381583522906
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
|
222 |
+
"scr_dir1_threshold_2": 0.07246369299982378,
|
223 |
+
"scr_metric_threshold_2": 0.02830180191578314,
|
224 |
+
"scr_dir2_threshold_2": 0.02830180191578314,
|
225 |
+
"scr_dir1_threshold_5": 0.13043482016675478,
|
226 |
+
"scr_metric_threshold_5": -0.1981131757184187,
|
227 |
+
"scr_dir2_threshold_5": -0.1981131757184187,
|
228 |
+
"scr_dir1_threshold_10": 0.20289851316657856,
|
229 |
+
"scr_metric_threshold_10": -0.15094341214079066,
|
230 |
+
"scr_dir2_threshold_10": -0.15094341214079066,
|
231 |
+
"scr_dir1_threshold_20": 0.31159405266631424,
|
232 |
+
"scr_metric_threshold_20": -0.4009435527177748,
|
233 |
+
"scr_dir2_threshold_20": -0.4009435527177748,
|
234 |
+
"scr_dir1_threshold_50": -0.25362335741706316,
|
235 |
+
"scr_metric_threshold_50": -0.4198115143796197,
|
236 |
+
"scr_dir2_threshold_50": -0.4198115143796197,
|
237 |
+
"scr_dir1_threshold_100": -0.7753622061664023,
|
238 |
+
"scr_metric_threshold_100": -0.7688681022388291,
|
239 |
+
"scr_dir2_threshold_100": -0.7688681022388291,
|
240 |
+
"scr_dir1_threshold_500": -1.731884076750044,
|
241 |
+
"scr_metric_threshold_500": -0.6556603322677598,
|
242 |
+
"scr_dir2_threshold_500": -0.6556603322677598
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
|
246 |
+
"scr_dir1_threshold_2": 0.17322841667108618,
|
247 |
+
"scr_metric_threshold_2": 0.19277098627968173,
|
248 |
+
"scr_dir2_threshold_2": 0.19277098627968173,
|
249 |
+
"scr_dir1_threshold_5": -0.2283462719181943,
|
250 |
+
"scr_metric_threshold_5": -0.2409638525376377,
|
251 |
+
"scr_dir2_threshold_5": -0.2409638525376377,
|
252 |
+
"scr_dir1_threshold_10": 0.2204723596440055,
|
253 |
+
"scr_metric_threshold_10": -0.21686753909669523,
|
254 |
+
"scr_dir2_threshold_10": -0.21686753909669523,
|
255 |
+
"scr_dir1_threshold_20": 0.36220465789054995,
|
256 |
+
"scr_metric_threshold_20": -0.5381525960215219,
|
257 |
+
"scr_dir2_threshold_20": -0.5381525960215219,
|
258 |
+
"scr_dir1_threshold_50": 0.5196852500132585,
|
259 |
+
"scr_metric_threshold_50": -0.4859437174194206,
|
260 |
+
"scr_dir2_threshold_50": -0.4859437174194206,
|
261 |
+
"scr_dir1_threshold_100": -0.6220470482332859,
|
262 |
+
"scr_metric_threshold_100": -0.49397598148378213,
|
263 |
+
"scr_dir2_threshold_100": -0.49397598148378213,
|
264 |
+
"scr_dir1_threshold_500": -2.5354326052338494,
|
265 |
+
"scr_metric_threshold_500": -0.5783131982151163,
|
266 |
+
"scr_dir2_threshold_500": -0.5783131982151163
|
267 |
+
},
|
268 |
+
{
|
269 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
|
270 |
+
"scr_dir1_threshold_2": 0.012121054489114207,
|
271 |
+
"scr_metric_threshold_2": -0.038888717801489385,
|
272 |
+
"scr_dir2_threshold_2": -0.038888717801489385,
|
273 |
+
"scr_dir1_threshold_5": 0.054545467681462706,
|
274 |
+
"scr_metric_threshold_5": -0.08333325054910777,
|
275 |
+
"scr_dir2_threshold_5": -0.08333325054910777,
|
276 |
+
"scr_dir1_threshold_10": 0.04242405195212411,
|
277 |
+
"scr_metric_threshold_10": 0.10555551692291695,
|
278 |
+
"scr_dir2_threshold_10": 0.10555551692291695,
|
279 |
+
"scr_dir1_threshold_20": -0.0787879378999155,
|
280 |
+
"scr_metric_threshold_20": 0.02222226637380919,
|
281 |
+
"scr_dir2_threshold_20": 0.02222226637380919,
|
282 |
+
"scr_dir1_threshold_50": -0.7636361863002534,
|
283 |
+
"scr_metric_threshold_50": -0.3222220345779776,
|
284 |
+
"scr_dir2_threshold_50": -0.3222220345779776,
|
285 |
+
"scr_dir1_threshold_100": -1.266666449922532,
|
286 |
+
"scr_metric_threshold_100": -0.57777760117143,
|
287 |
+
"scr_dir2_threshold_100": -0.57777760117143,
|
288 |
+
"scr_dir1_threshold_500": -1.4484847959408158,
|
289 |
+
"scr_metric_threshold_500": -1.2000000662273804,
|
290 |
+
"scr_dir2_threshold_500": -1.2000000662273804
|
291 |
+
}
|
292 |
+
],
|
293 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
294 |
+
"sae_lens_id": "custom_sae",
|
295 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_TopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_5",
|
296 |
+
"sae_lens_version": "5.3.1",
|
297 |
+
"sae_cfg_dict": {
|
298 |
+
"model_name": "pythia-160m-deduped",
|
299 |
+
"d_in": 768,
|
300 |
+
"d_sae": 4096,
|
301 |
+
"hook_layer": 8,
|
302 |
+
"hook_name": "blocks.8.hook_resid_post",
|
303 |
+
"context_size": null,
|
304 |
+
"hook_head_index": null,
|
305 |
+
"architecture": "topk",
|
306 |
+
"apply_b_dec_to_input": null,
|
307 |
+
"finetuning_scaling_factor": null,
|
308 |
+
"activation_fn_str": "",
|
309 |
+
"prepend_bos": true,
|
310 |
+
"normalize_activations": "none",
|
311 |
+
"dtype": "float32",
|
312 |
+
"device": "",
|
313 |
+
"dataset_path": "",
|
314 |
+
"dataset_trust_remote_code": true,
|
315 |
+
"seqpos_slice": [
|
316 |
+
null
|
317 |
+
],
|
318 |
+
"training_tokens": 499998720,
|
319 |
+
"sae_lens_training_version": null,
|
320 |
+
"neuronpedia_id": null
|
321 |
+
},
|
322 |
+
"eval_result_unstructured": null
|
323 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_11_eval_results.json
ADDED
@@ -0,0 +1,670 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "7e33e687-c7bd-46db-b82b-e25d54cfd24c",
|
30 |
+
"datetime_epoch_millis": 1737047550605,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9328125406056642,
|
34 |
+
"llm_top_1_test_accuracy": 0.65114375,
|
35 |
+
"llm_top_2_test_accuracy": 0.730825,
|
36 |
+
"llm_top_5_test_accuracy": 0.8192687500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9234125405550003,
|
44 |
+
"sae_top_1_test_accuracy": 0.7342499999999998,
|
45 |
+
"sae_top_2_test_accuracy": 0.7866125,
|
46 |
+
"sae_top_5_test_accuracy": 0.825375,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9536000490188599,
|
57 |
+
"llm_top_1_test_accuracy": 0.6024,
|
58 |
+
"llm_top_2_test_accuracy": 0.7208,
|
59 |
+
"llm_top_5_test_accuracy": 0.8072000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9470000505447388,
|
65 |
+
"sae_top_1_test_accuracy": 0.7342000000000001,
|
66 |
+
"sae_top_2_test_accuracy": 0.8064,
|
67 |
+
"sae_top_5_test_accuracy": 0.8254000000000001,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9330000519752503,
|
76 |
+
"llm_top_1_test_accuracy": 0.6324,
|
77 |
+
"llm_top_2_test_accuracy": 0.6996,
|
78 |
+
"llm_top_5_test_accuracy": 0.8076000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9224000453948975,
|
84 |
+
"sae_top_1_test_accuracy": 0.6892,
|
85 |
+
"sae_top_2_test_accuracy": 0.7552,
|
86 |
+
"sae_top_5_test_accuracy": 0.7969999999999999,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9112000465393066,
|
95 |
+
"llm_top_1_test_accuracy": 0.6488,
|
96 |
+
"llm_top_2_test_accuracy": 0.722,
|
97 |
+
"llm_top_5_test_accuracy": 0.7896,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.8986000299453736,
|
103 |
+
"sae_top_1_test_accuracy": 0.6952,
|
104 |
+
"sae_top_2_test_accuracy": 0.7466,
|
105 |
+
"sae_top_5_test_accuracy": 0.8026,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8668000340461731,
|
114 |
+
"llm_top_1_test_accuracy": 0.6412,
|
115 |
+
"llm_top_2_test_accuracy": 0.6869999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.738,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.8562000393867493,
|
122 |
+
"sae_top_1_test_accuracy": 0.6546,
|
123 |
+
"sae_top_2_test_accuracy": 0.6952,
|
124 |
+
"sae_top_5_test_accuracy": 0.75,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.8975000381469727,
|
133 |
+
"llm_top_1_test_accuracy": 0.64,
|
134 |
+
"llm_top_2_test_accuracy": 0.717,
|
135 |
+
"llm_top_5_test_accuracy": 0.789,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.8815000355243683,
|
141 |
+
"sae_top_1_test_accuracy": 0.666,
|
142 |
+
"sae_top_2_test_accuracy": 0.713,
|
143 |
+
"sae_top_5_test_accuracy": 0.753,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9640000343322754,
|
152 |
+
"llm_top_1_test_accuracy": 0.6885999999999999,
|
153 |
+
"llm_top_2_test_accuracy": 0.6950000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7953999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9496000409126282,
|
160 |
+
"sae_top_1_test_accuracy": 0.7234,
|
161 |
+
"sae_top_2_test_accuracy": 0.7807999999999999,
|
162 |
+
"sae_top_5_test_accuracy": 0.8251999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9370000511407852,
|
171 |
+
"llm_top_1_test_accuracy": 0.67675,
|
172 |
+
"llm_top_2_test_accuracy": 0.7130000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8327500000000001,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9330000579357147,
|
179 |
+
"sae_top_1_test_accuracy": 0.7889999999999999,
|
180 |
+
"sae_top_2_test_accuracy": 0.8065,
|
181 |
+
"sae_top_5_test_accuracy": 0.8519999999999999,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000196456909,
|
190 |
+
"llm_top_1_test_accuracy": 0.6789999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.8921999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9945999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9990000247955322,
|
198 |
+
"sae_top_1_test_accuracy": 0.9224,
|
199 |
+
"sae_top_2_test_accuracy": 0.9892,
|
200 |
+
"sae_top_5_test_accuracy": 0.9978,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_11",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "pythia-160m-deduped",
|
213 |
+
"d_in": 768,
|
214 |
+
"d_sae": 4096,
|
215 |
+
"hook_layer": 8,
|
216 |
+
"hook_name": "blocks.8.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "float32",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": 499998720,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9080000519752502,
|
240 |
+
"1": 0.9490000605583191,
|
241 |
+
"2": 0.937000036239624,
|
242 |
+
"6": 0.9820000529289246,
|
243 |
+
"9": 0.9590000510215759
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9230000376701355,
|
247 |
+
"1": 0.9530000686645508,
|
248 |
+
"2": 0.937000036239624,
|
249 |
+
"6": 0.9860000610351562,
|
250 |
+
"9": 0.9690000414848328
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.496,
|
254 |
+
"1": 0.574,
|
255 |
+
"2": 0.754,
|
256 |
+
"6": 0.632,
|
257 |
+
"9": 0.556
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.686,
|
261 |
+
"1": 0.613,
|
262 |
+
"2": 0.763,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.742
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.734,
|
268 |
+
"1": 0.755,
|
269 |
+
"2": 0.812,
|
270 |
+
"6": 0.855,
|
271 |
+
"9": 0.88
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.673,
|
275 |
+
"1": 0.681,
|
276 |
+
"2": 0.79,
|
277 |
+
"6": 0.775,
|
278 |
+
"9": 0.752
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.708,
|
282 |
+
"1": 0.73,
|
283 |
+
"2": 0.826,
|
284 |
+
"6": 0.906,
|
285 |
+
"9": 0.862
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.719,
|
289 |
+
"1": 0.739,
|
290 |
+
"2": 0.851,
|
291 |
+
"6": 0.929,
|
292 |
+
"9": 0.889
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9440000653266907,
|
298 |
+
"13": 0.9230000376701355,
|
299 |
+
"14": 0.9360000491142273,
|
300 |
+
"18": 0.8750000596046448,
|
301 |
+
"19": 0.9340000152587891
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9490000605583191,
|
305 |
+
"13": 0.9280000329017639,
|
306 |
+
"14": 0.9520000219345093,
|
307 |
+
"18": 0.8880000710487366,
|
308 |
+
"19": 0.9480000734329224
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.772,
|
312 |
+
"13": 0.61,
|
313 |
+
"14": 0.6,
|
314 |
+
"18": 0.603,
|
315 |
+
"19": 0.577
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.787,
|
319 |
+
"13": 0.619,
|
320 |
+
"14": 0.675,
|
321 |
+
"18": 0.711,
|
322 |
+
"19": 0.706
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.834,
|
326 |
+
"13": 0.798,
|
327 |
+
"14": 0.819,
|
328 |
+
"18": 0.747,
|
329 |
+
"19": 0.84
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.609,
|
333 |
+
"13": 0.731,
|
334 |
+
"14": 0.714,
|
335 |
+
"18": 0.651,
|
336 |
+
"19": 0.741
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.794,
|
340 |
+
"13": 0.752,
|
341 |
+
"14": 0.727,
|
342 |
+
"18": 0.691,
|
343 |
+
"19": 0.812
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.848,
|
347 |
+
"13": 0.796,
|
348 |
+
"14": 0.759,
|
349 |
+
"18": 0.722,
|
350 |
+
"19": 0.86
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9330000281333923,
|
356 |
+
"21": 0.8750000596046448,
|
357 |
+
"22": 0.8920000195503235,
|
358 |
+
"25": 0.9380000233650208,
|
359 |
+
"26": 0.8550000190734863
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9440000653266907,
|
363 |
+
"21": 0.8950000405311584,
|
364 |
+
"22": 0.8970000147819519,
|
365 |
+
"25": 0.9480000734329224,
|
366 |
+
"26": 0.8720000386238098
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.77,
|
370 |
+
"21": 0.782,
|
371 |
+
"22": 0.496,
|
372 |
+
"25": 0.632,
|
373 |
+
"26": 0.564
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.82,
|
377 |
+
"21": 0.801,
|
378 |
+
"22": 0.64,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.598
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.849,
|
384 |
+
"21": 0.816,
|
385 |
+
"22": 0.706,
|
386 |
+
"25": 0.836,
|
387 |
+
"26": 0.741
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.719,
|
391 |
+
"21": 0.746,
|
392 |
+
"22": 0.634,
|
393 |
+
"25": 0.786,
|
394 |
+
"26": 0.591
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.811,
|
398 |
+
"21": 0.775,
|
399 |
+
"22": 0.669,
|
400 |
+
"25": 0.849,
|
401 |
+
"26": 0.629
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.877,
|
405 |
+
"21": 0.8,
|
406 |
+
"22": 0.723,
|
407 |
+
"25": 0.878,
|
408 |
+
"26": 0.735
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9130000472068787,
|
414 |
+
"2": 0.8720000386238098,
|
415 |
+
"3": 0.862000048160553,
|
416 |
+
"5": 0.8510000109672546,
|
417 |
+
"6": 0.7830000519752502
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9190000295639038,
|
421 |
+
"2": 0.8770000338554382,
|
422 |
+
"3": 0.8710000514984131,
|
423 |
+
"5": 0.859000027179718,
|
424 |
+
"6": 0.8080000281333923
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.685,
|
428 |
+
"2": 0.638,
|
429 |
+
"3": 0.641,
|
430 |
+
"5": 0.564,
|
431 |
+
"6": 0.678
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.762,
|
435 |
+
"2": 0.695,
|
436 |
+
"3": 0.654,
|
437 |
+
"5": 0.603,
|
438 |
+
"6": 0.721
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.804,
|
442 |
+
"2": 0.729,
|
443 |
+
"3": 0.725,
|
444 |
+
"5": 0.718,
|
445 |
+
"6": 0.714
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.752,
|
449 |
+
"2": 0.68,
|
450 |
+
"3": 0.598,
|
451 |
+
"5": 0.613,
|
452 |
+
"6": 0.63
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.781,
|
456 |
+
"2": 0.682,
|
457 |
+
"3": 0.668,
|
458 |
+
"5": 0.692,
|
459 |
+
"6": 0.653
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.82,
|
463 |
+
"2": 0.757,
|
464 |
+
"3": 0.724,
|
465 |
+
"5": 0.737,
|
466 |
+
"6": 0.712
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.8770000338554382,
|
472 |
+
"5.0": 0.8860000371932983
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.8980000615119934,
|
476 |
+
"5.0": 0.8970000147819519
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.64,
|
480 |
+
"5.0": 0.64
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.717,
|
484 |
+
"5.0": 0.717
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.789,
|
488 |
+
"5.0": 0.789
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.666,
|
492 |
+
"5.0": 0.666
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.713,
|
496 |
+
"5.0": 0.713
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.753,
|
500 |
+
"5.0": 0.753
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9220000505447388,
|
506 |
+
"Python": 0.9790000319480896,
|
507 |
+
"HTML": 0.9750000238418579,
|
508 |
+
"Java": 0.937000036239624,
|
509 |
+
"PHP": 0.9350000619888306
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9610000252723694,
|
513 |
+
"Python": 0.9780000448226929,
|
514 |
+
"HTML": 0.984000027179718,
|
515 |
+
"Java": 0.9450000524520874,
|
516 |
+
"PHP": 0.9520000219345093
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.588,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.923,
|
522 |
+
"Java": 0.642,
|
523 |
+
"PHP": 0.659
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.598,
|
527 |
+
"Python": 0.628,
|
528 |
+
"HTML": 0.926,
|
529 |
+
"Java": 0.657,
|
530 |
+
"PHP": 0.666
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.718,
|
534 |
+
"Python": 0.816,
|
535 |
+
"HTML": 0.938,
|
536 |
+
"Java": 0.757,
|
537 |
+
"PHP": 0.748
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.672,
|
541 |
+
"Python": 0.668,
|
542 |
+
"HTML": 0.915,
|
543 |
+
"Java": 0.709,
|
544 |
+
"PHP": 0.653
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.728,
|
548 |
+
"Python": 0.833,
|
549 |
+
"HTML": 0.95,
|
550 |
+
"Java": 0.743,
|
551 |
+
"PHP": 0.65
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.738,
|
555 |
+
"Python": 0.858,
|
556 |
+
"HTML": 0.946,
|
557 |
+
"Java": 0.807,
|
558 |
+
"PHP": 0.777
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9250000715255737,
|
564 |
+
"1": 0.9690000414848328,
|
565 |
+
"2": 0.9080000519752502,
|
566 |
+
"3": 0.9300000667572021
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9230000376701355,
|
570 |
+
"1": 0.971000075340271,
|
571 |
+
"2": 0.9190000295639038,
|
572 |
+
"3": 0.9350000619888306
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.782,
|
576 |
+
"1": 0.582,
|
577 |
+
"2": 0.762,
|
578 |
+
"3": 0.581
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.784,
|
582 |
+
"1": 0.612,
|
583 |
+
"2": 0.785,
|
584 |
+
"3": 0.671
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.847,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.813,
|
590 |
+
"3": 0.785
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.751,
|
594 |
+
"1": 0.913,
|
595 |
+
"2": 0.756,
|
596 |
+
"3": 0.736
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.786,
|
600 |
+
"1": 0.924,
|
601 |
+
"2": 0.771,
|
602 |
+
"3": 0.745
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.83,
|
606 |
+
"1": 0.924,
|
607 |
+
"2": 0.833,
|
608 |
+
"3": 0.821
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.9980000257492065,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 1.0,
|
616 |
+
"es": 0.9980000257492065,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 0.9980000257492065,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.694,
|
628 |
+
"fr": 0.567,
|
629 |
+
"de": 0.808,
|
630 |
+
"es": 0.735,
|
631 |
+
"nl": 0.591
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.984,
|
635 |
+
"fr": 0.981,
|
636 |
+
"de": 0.813,
|
637 |
+
"es": 0.9,
|
638 |
+
"nl": 0.783
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.99,
|
642 |
+
"fr": 0.997,
|
643 |
+
"de": 0.998,
|
644 |
+
"es": 0.994,
|
645 |
+
"nl": 0.994
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.999,
|
649 |
+
"fr": 0.981,
|
650 |
+
"de": 0.998,
|
651 |
+
"es": 0.997,
|
652 |
+
"nl": 0.637
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.997,
|
656 |
+
"fr": 0.998,
|
657 |
+
"de": 0.998,
|
658 |
+
"es": 0.997,
|
659 |
+
"nl": 0.956
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.998,
|
663 |
+
"fr": 0.998,
|
664 |
+
"de": 0.998,
|
665 |
+
"es": 0.997,
|
666 |
+
"nl": 0.998
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
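Note: all of the sparse_probing eval_results files in this upload share the schema shown above. A minimal sketch of reading one file and printing the headline LLM-probe versus SAE-probe accuracies, assuming only the Python standard library and that one of the *_eval_results.json files listed here has been downloaded locally (the path below is illustrative):

import json

# Illustrative local path; substitute any *_eval_results.json file from this upload.
path = "trainer_6_eval_results.json"

with open(path) as f:
    results = json.load(f)

llm = results["eval_result_metrics"]["llm"]
sae = results["eval_result_metrics"]["sae"]

print("LLM probe accuracy:", llm["llm_test_accuracy"])
print("SAE probe accuracy:", sae["sae_test_accuracy"])
# k_values from eval_config: 1, 2, 5
for k in (1, 2, 5):
    print(f"top-{k}: llm={llm[f'llm_top_{k}_test_accuracy']:.3f} "
          f"sae={sae[f'sae_top_{k}_test_accuracy']:.3f}")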
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_6_eval_results.json
ADDED
@@ -0,0 +1,670 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "a3a1287a-bc82-40e2-b89e-2d2e12f4ebf2",
|
30 |
+
"datetime_epoch_millis": 1737047349312,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9328125406056642,
|
34 |
+
"llm_top_1_test_accuracy": 0.65114375,
|
35 |
+
"llm_top_2_test_accuracy": 0.730825,
|
36 |
+
"llm_top_5_test_accuracy": 0.8192687500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9301562920212745,
|
44 |
+
"sae_top_1_test_accuracy": 0.73624375,
|
45 |
+
"sae_top_2_test_accuracy": 0.7912,
|
46 |
+
"sae_top_5_test_accuracy": 0.8312,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9536000490188599,
|
57 |
+
"llm_top_1_test_accuracy": 0.6024,
|
58 |
+
"llm_top_2_test_accuracy": 0.7208,
|
59 |
+
"llm_top_5_test_accuracy": 0.8072000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9462000489234924,
|
65 |
+
"sae_top_1_test_accuracy": 0.726,
|
66 |
+
"sae_top_2_test_accuracy": 0.8124,
|
67 |
+
"sae_top_5_test_accuracy": 0.8320000000000001,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9330000519752503,
|
76 |
+
"llm_top_1_test_accuracy": 0.6324,
|
77 |
+
"llm_top_2_test_accuracy": 0.6996,
|
78 |
+
"llm_top_5_test_accuracy": 0.8076000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9258000373840332,
|
84 |
+
"sae_top_1_test_accuracy": 0.7316,
|
85 |
+
"sae_top_2_test_accuracy": 0.7836000000000001,
|
86 |
+
"sae_top_5_test_accuracy": 0.8188000000000001,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9112000465393066,
|
95 |
+
"llm_top_1_test_accuracy": 0.6488,
|
96 |
+
"llm_top_2_test_accuracy": 0.722,
|
97 |
+
"llm_top_5_test_accuracy": 0.7896,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9086000323295593,
|
103 |
+
"sae_top_1_test_accuracy": 0.7288,
|
104 |
+
"sae_top_2_test_accuracy": 0.7404,
|
105 |
+
"sae_top_5_test_accuracy": 0.8102,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8668000340461731,
|
114 |
+
"llm_top_1_test_accuracy": 0.6412,
|
115 |
+
"llm_top_2_test_accuracy": 0.6869999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.738,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.8702000498771667,
|
122 |
+
"sae_top_1_test_accuracy": 0.7030000000000001,
|
123 |
+
"sae_top_2_test_accuracy": 0.7286,
|
124 |
+
"sae_top_5_test_accuracy": 0.7771999999999999,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.8975000381469727,
|
133 |
+
"llm_top_1_test_accuracy": 0.64,
|
134 |
+
"llm_top_2_test_accuracy": 0.717,
|
135 |
+
"llm_top_5_test_accuracy": 0.789,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.8875000476837158,
|
141 |
+
"sae_top_1_test_accuracy": 0.572,
|
142 |
+
"sae_top_2_test_accuracy": 0.647,
|
143 |
+
"sae_top_5_test_accuracy": 0.679,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9640000343322754,
|
152 |
+
"llm_top_1_test_accuracy": 0.6885999999999999,
|
153 |
+
"llm_top_2_test_accuracy": 0.6950000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7953999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9616000413894653,
|
160 |
+
"sae_top_1_test_accuracy": 0.6594,
|
161 |
+
"sae_top_2_test_accuracy": 0.8214,
|
162 |
+
"sae_top_5_test_accuracy": 0.8766,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9370000511407852,
|
171 |
+
"llm_top_1_test_accuracy": 0.67675,
|
172 |
+
"llm_top_2_test_accuracy": 0.7130000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8327500000000001,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9417500495910645,
|
179 |
+
"sae_top_1_test_accuracy": 0.78075,
|
180 |
+
"sae_top_2_test_accuracy": 0.8079999999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.865,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000196456909,
|
190 |
+
"llm_top_1_test_accuracy": 0.6789999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.8921999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9945999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9996000289916992,
|
198 |
+
"sae_top_1_test_accuracy": 0.9883999999999998,
|
199 |
+
"sae_top_2_test_accuracy": 0.9882,
|
200 |
+
"sae_top_5_test_accuracy": 0.9907999999999999,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_6",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "pythia-160m-deduped",
|
213 |
+
"d_in": 768,
|
214 |
+
"d_sae": 4096,
|
215 |
+
"hook_layer": 8,
|
216 |
+
"hook_name": "blocks.8.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "float32",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": 499998720,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9200000166893005,
|
240 |
+
"1": 0.9480000734329224,
|
241 |
+
"2": 0.9260000586509705,
|
242 |
+
"6": 0.9790000319480896,
|
243 |
+
"9": 0.9580000638961792
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9230000376701355,
|
247 |
+
"1": 0.9530000686645508,
|
248 |
+
"2": 0.937000036239624,
|
249 |
+
"6": 0.9860000610351562,
|
250 |
+
"9": 0.9690000414848328
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.496,
|
254 |
+
"1": 0.574,
|
255 |
+
"2": 0.754,
|
256 |
+
"6": 0.632,
|
257 |
+
"9": 0.556
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.686,
|
261 |
+
"1": 0.613,
|
262 |
+
"2": 0.763,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.742
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.734,
|
268 |
+
"1": 0.755,
|
269 |
+
"2": 0.812,
|
270 |
+
"6": 0.855,
|
271 |
+
"9": 0.88
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.666,
|
275 |
+
"1": 0.665,
|
276 |
+
"2": 0.643,
|
277 |
+
"6": 0.752,
|
278 |
+
"9": 0.904
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.746,
|
282 |
+
"1": 0.661,
|
283 |
+
"2": 0.811,
|
284 |
+
"6": 0.927,
|
285 |
+
"9": 0.917
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.767,
|
289 |
+
"1": 0.684,
|
290 |
+
"2": 0.861,
|
291 |
+
"6": 0.925,
|
292 |
+
"9": 0.923
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9480000734329224,
|
298 |
+
"13": 0.9290000200271606,
|
299 |
+
"14": 0.9320000410079956,
|
300 |
+
"18": 0.8770000338554382,
|
301 |
+
"19": 0.9430000185966492
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9490000605583191,
|
305 |
+
"13": 0.9280000329017639,
|
306 |
+
"14": 0.9520000219345093,
|
307 |
+
"18": 0.8880000710487366,
|
308 |
+
"19": 0.9480000734329224
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.772,
|
312 |
+
"13": 0.61,
|
313 |
+
"14": 0.6,
|
314 |
+
"18": 0.603,
|
315 |
+
"19": 0.577
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.787,
|
319 |
+
"13": 0.619,
|
320 |
+
"14": 0.675,
|
321 |
+
"18": 0.711,
|
322 |
+
"19": 0.706
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.834,
|
326 |
+
"13": 0.798,
|
327 |
+
"14": 0.819,
|
328 |
+
"18": 0.747,
|
329 |
+
"19": 0.84
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.83,
|
333 |
+
"13": 0.734,
|
334 |
+
"14": 0.821,
|
335 |
+
"18": 0.637,
|
336 |
+
"19": 0.636
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.854,
|
340 |
+
"13": 0.741,
|
341 |
+
"14": 0.825,
|
342 |
+
"18": 0.663,
|
343 |
+
"19": 0.835
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.877,
|
347 |
+
"13": 0.782,
|
348 |
+
"14": 0.838,
|
349 |
+
"18": 0.749,
|
350 |
+
"19": 0.848
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9420000314712524,
|
356 |
+
"21": 0.8970000147819519,
|
357 |
+
"22": 0.8950000405311584,
|
358 |
+
"25": 0.9380000233650208,
|
359 |
+
"26": 0.8710000514984131
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9440000653266907,
|
363 |
+
"21": 0.8950000405311584,
|
364 |
+
"22": 0.8970000147819519,
|
365 |
+
"25": 0.9480000734329224,
|
366 |
+
"26": 0.8720000386238098
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.77,
|
370 |
+
"21": 0.782,
|
371 |
+
"22": 0.496,
|
372 |
+
"25": 0.632,
|
373 |
+
"26": 0.564
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.82,
|
377 |
+
"21": 0.801,
|
378 |
+
"22": 0.64,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.598
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.849,
|
384 |
+
"21": 0.816,
|
385 |
+
"22": 0.706,
|
386 |
+
"25": 0.836,
|
387 |
+
"26": 0.741
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.824,
|
391 |
+
"21": 0.689,
|
392 |
+
"22": 0.627,
|
393 |
+
"25": 0.833,
|
394 |
+
"26": 0.671
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.834,
|
398 |
+
"21": 0.73,
|
399 |
+
"22": 0.626,
|
400 |
+
"25": 0.846,
|
401 |
+
"26": 0.666
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.884,
|
405 |
+
"21": 0.751,
|
406 |
+
"22": 0.863,
|
407 |
+
"25": 0.855,
|
408 |
+
"26": 0.698
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9050000309944153,
|
414 |
+
"2": 0.8980000615119934,
|
415 |
+
"3": 0.8670000433921814,
|
416 |
+
"5": 0.8610000610351562,
|
417 |
+
"6": 0.8200000524520874
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9190000295639038,
|
421 |
+
"2": 0.8770000338554382,
|
422 |
+
"3": 0.8710000514984131,
|
423 |
+
"5": 0.859000027179718,
|
424 |
+
"6": 0.8080000281333923
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.685,
|
428 |
+
"2": 0.638,
|
429 |
+
"3": 0.641,
|
430 |
+
"5": 0.564,
|
431 |
+
"6": 0.678
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.762,
|
435 |
+
"2": 0.695,
|
436 |
+
"3": 0.654,
|
437 |
+
"5": 0.603,
|
438 |
+
"6": 0.721
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.804,
|
442 |
+
"2": 0.729,
|
443 |
+
"3": 0.725,
|
444 |
+
"5": 0.718,
|
445 |
+
"6": 0.714
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.669,
|
449 |
+
"2": 0.77,
|
450 |
+
"3": 0.599,
|
451 |
+
"5": 0.783,
|
452 |
+
"6": 0.694
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.775,
|
456 |
+
"2": 0.77,
|
457 |
+
"3": 0.583,
|
458 |
+
"5": 0.798,
|
459 |
+
"6": 0.717
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.819,
|
463 |
+
"2": 0.774,
|
464 |
+
"3": 0.743,
|
465 |
+
"5": 0.803,
|
466 |
+
"6": 0.747
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.8880000710487366,
|
472 |
+
"5.0": 0.8870000243186951
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.8980000615119934,
|
476 |
+
"5.0": 0.8970000147819519
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.64,
|
480 |
+
"5.0": 0.64
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.717,
|
484 |
+
"5.0": 0.717
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.789,
|
488 |
+
"5.0": 0.789
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.572,
|
492 |
+
"5.0": 0.572
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.647,
|
496 |
+
"5.0": 0.647
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.679,
|
500 |
+
"5.0": 0.679
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9410000443458557,
|
506 |
+
"Python": 0.9800000190734863,
|
507 |
+
"HTML": 0.9820000529289246,
|
508 |
+
"Java": 0.9610000252723694,
|
509 |
+
"PHP": 0.9440000653266907
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9610000252723694,
|
513 |
+
"Python": 0.9780000448226929,
|
514 |
+
"HTML": 0.984000027179718,
|
515 |
+
"Java": 0.9450000524520874,
|
516 |
+
"PHP": 0.9520000219345093
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.588,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.923,
|
522 |
+
"Java": 0.642,
|
523 |
+
"PHP": 0.659
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.598,
|
527 |
+
"Python": 0.628,
|
528 |
+
"HTML": 0.926,
|
529 |
+
"Java": 0.657,
|
530 |
+
"PHP": 0.666
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.718,
|
534 |
+
"Python": 0.816,
|
535 |
+
"HTML": 0.938,
|
536 |
+
"Java": 0.757,
|
537 |
+
"PHP": 0.748
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.6,
|
541 |
+
"Python": 0.668,
|
542 |
+
"HTML": 0.787,
|
543 |
+
"Java": 0.582,
|
544 |
+
"PHP": 0.66
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.767,
|
548 |
+
"Python": 0.895,
|
549 |
+
"HTML": 0.818,
|
550 |
+
"Java": 0.731,
|
551 |
+
"PHP": 0.896
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.794,
|
555 |
+
"Python": 0.9,
|
556 |
+
"HTML": 0.912,
|
557 |
+
"Java": 0.873,
|
558 |
+
"PHP": 0.904
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9440000653266907,
|
564 |
+
"1": 0.9730000495910645,
|
565 |
+
"2": 0.9100000262260437,
|
566 |
+
"3": 0.940000057220459
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9230000376701355,
|
570 |
+
"1": 0.971000075340271,
|
571 |
+
"2": 0.9190000295639038,
|
572 |
+
"3": 0.9350000619888306
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.782,
|
576 |
+
"1": 0.582,
|
577 |
+
"2": 0.762,
|
578 |
+
"3": 0.581
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.784,
|
582 |
+
"1": 0.612,
|
583 |
+
"2": 0.785,
|
584 |
+
"3": 0.671
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.847,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.813,
|
590 |
+
"3": 0.785
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.796,
|
594 |
+
"1": 0.936,
|
595 |
+
"2": 0.764,
|
596 |
+
"3": 0.627
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.808,
|
600 |
+
"1": 0.938,
|
601 |
+
"2": 0.796,
|
602 |
+
"3": 0.69
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.844,
|
606 |
+
"1": 0.945,
|
607 |
+
"2": 0.872,
|
608 |
+
"3": 0.799
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 1.0,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 0.9980000257492065,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.694,
|
628 |
+
"fr": 0.567,
|
629 |
+
"de": 0.808,
|
630 |
+
"es": 0.735,
|
631 |
+
"nl": 0.591
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.984,
|
635 |
+
"fr": 0.981,
|
636 |
+
"de": 0.813,
|
637 |
+
"es": 0.9,
|
638 |
+
"nl": 0.783
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.99,
|
642 |
+
"fr": 0.997,
|
643 |
+
"de": 0.998,
|
644 |
+
"es": 0.994,
|
645 |
+
"nl": 0.994
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.954,
|
649 |
+
"fr": 0.998,
|
650 |
+
"de": 0.997,
|
651 |
+
"es": 0.994,
|
652 |
+
"nl": 0.999
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.951,
|
656 |
+
"fr": 0.998,
|
657 |
+
"de": 0.998,
|
658 |
+
"es": 0.995,
|
659 |
+
"nl": 0.999
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.962,
|
663 |
+
"fr": 0.997,
|
664 |
+
"de": 0.998,
|
665 |
+
"es": 0.997,
|
666 |
+
"nl": 1.0
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
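Because every trainer in this release is evaluated on the same eight datasets, the per-dataset entries in eval_result_details can be tabulated across files. A sketch, assuming the trainer_*_eval_results.json files from this upload have been saved under one local directory (the glob pattern below is illustrative):

import glob
import json

# Illustrative pattern; point it at wherever the files from this upload were saved.
for path in sorted(glob.glob("sparse_probing/*BatchTopK*_eval_results.json")):
    with open(path) as f:
        results = json.load(f)
    sae_id = results["sae_lens_release_id"]
    rows = results["eval_result_details"]
    # Mean SAE top-1 probe accuracy across the eight evaluation datasets.
    mean_top1 = sum(r["sae_top_1_test_accuracy"] for r in rows) / len(rows)
    print(f"{sae_id}: mean sae_top_1_test_accuracy = {mean_top1:.3f}")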
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_7_eval_results.json
ADDED
@@ -0,0 +1,670 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "280d146c-41c6-4939-85a2-fc3e1fa2d017",
|
30 |
+
"datetime_epoch_millis": 1737047769591,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9328125406056642,
|
34 |
+
"llm_top_1_test_accuracy": 0.65114375,
|
35 |
+
"llm_top_2_test_accuracy": 0.730825,
|
36 |
+
"llm_top_5_test_accuracy": 0.8192687500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9345875456929207,
|
44 |
+
"sae_top_1_test_accuracy": 0.7391062500000001,
|
45 |
+
"sae_top_2_test_accuracy": 0.7961125,
|
46 |
+
"sae_top_5_test_accuracy": 0.8381812500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9536000490188599,
|
57 |
+
"llm_top_1_test_accuracy": 0.6024,
|
58 |
+
"llm_top_2_test_accuracy": 0.7208,
|
59 |
+
"llm_top_5_test_accuracy": 0.8072000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9510000467300415,
|
65 |
+
"sae_top_1_test_accuracy": 0.7936,
|
66 |
+
"sae_top_2_test_accuracy": 0.7958000000000001,
|
67 |
+
"sae_top_5_test_accuracy": 0.8732,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9330000519752503,
|
76 |
+
"llm_top_1_test_accuracy": 0.6324,
|
77 |
+
"llm_top_2_test_accuracy": 0.6996,
|
78 |
+
"llm_top_5_test_accuracy": 0.8076000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9306000471115112,
|
84 |
+
"sae_top_1_test_accuracy": 0.6931999999999999,
|
85 |
+
"sae_top_2_test_accuracy": 0.7686,
|
86 |
+
"sae_top_5_test_accuracy": 0.8076000000000001,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9112000465393066,
|
95 |
+
"llm_top_1_test_accuracy": 0.6488,
|
96 |
+
"llm_top_2_test_accuracy": 0.722,
|
97 |
+
"llm_top_5_test_accuracy": 0.7896,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.9084000587463379,
|
103 |
+
"sae_top_1_test_accuracy": 0.7247999999999999,
|
104 |
+
"sae_top_2_test_accuracy": 0.7746,
|
105 |
+
"sae_top_5_test_accuracy": 0.8089999999999999,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8668000340461731,
|
114 |
+
"llm_top_1_test_accuracy": 0.6412,
|
115 |
+
"llm_top_2_test_accuracy": 0.6869999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.738,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.8750000476837159,
|
122 |
+
"sae_top_1_test_accuracy": 0.7066,
|
123 |
+
"sae_top_2_test_accuracy": 0.7220000000000001,
|
124 |
+
"sae_top_5_test_accuracy": 0.7598,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.8975000381469727,
|
133 |
+
"llm_top_1_test_accuracy": 0.64,
|
134 |
+
"llm_top_2_test_accuracy": 0.717,
|
135 |
+
"llm_top_5_test_accuracy": 0.789,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9025000631809235,
|
141 |
+
"sae_top_1_test_accuracy": 0.575,
|
142 |
+
"sae_top_2_test_accuracy": 0.659,
|
143 |
+
"sae_top_5_test_accuracy": 0.7,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9640000343322754,
|
152 |
+
"llm_top_1_test_accuracy": 0.6885999999999999,
|
153 |
+
"llm_top_2_test_accuracy": 0.6950000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7953999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9656000375747681,
|
160 |
+
"sae_top_1_test_accuracy": 0.6568,
|
161 |
+
"sae_top_2_test_accuracy": 0.8284,
|
162 |
+
"sae_top_5_test_accuracy": 0.8958,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9370000511407852,
|
171 |
+
"llm_top_1_test_accuracy": 0.67675,
|
172 |
+
"llm_top_2_test_accuracy": 0.7130000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8327500000000001,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9440000355243683,
|
179 |
+
"sae_top_1_test_accuracy": 0.8102499999999999,
|
180 |
+
"sae_top_2_test_accuracy": 0.8344999999999999,
|
181 |
+
"sae_top_5_test_accuracy": 0.87225,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000196456909,
|
190 |
+
"llm_top_1_test_accuracy": 0.6789999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.8921999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9945999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9996000289916992,
|
198 |
+
"sae_top_1_test_accuracy": 0.9526,
|
199 |
+
"sae_top_2_test_accuracy": 0.986,
|
200 |
+
"sae_top_5_test_accuracy": 0.9878,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_7",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "pythia-160m-deduped",
|
213 |
+
"d_in": 768,
|
214 |
+
"d_sae": 4096,
|
215 |
+
"hook_layer": 8,
|
216 |
+
"hook_name": "blocks.8.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "float32",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": 499998720,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9270000457763672,
|
240 |
+
"1": 0.9490000605583191,
|
241 |
+
"2": 0.9230000376701355,
|
242 |
+
"6": 0.9880000352859497,
|
243 |
+
"9": 0.968000054359436
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9230000376701355,
|
247 |
+
"1": 0.9530000686645508,
|
248 |
+
"2": 0.937000036239624,
|
249 |
+
"6": 0.9860000610351562,
|
250 |
+
"9": 0.9690000414848328
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.496,
|
254 |
+
"1": 0.574,
|
255 |
+
"2": 0.754,
|
256 |
+
"6": 0.632,
|
257 |
+
"9": 0.556
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.686,
|
261 |
+
"1": 0.613,
|
262 |
+
"2": 0.763,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.742
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.734,
|
268 |
+
"1": 0.755,
|
269 |
+
"2": 0.812,
|
270 |
+
"6": 0.855,
|
271 |
+
"9": 0.88
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.774,
|
275 |
+
"1": 0.672,
|
276 |
+
"2": 0.872,
|
277 |
+
"6": 0.739,
|
278 |
+
"9": 0.911
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.751,
|
282 |
+
"1": 0.675,
|
283 |
+
"2": 0.881,
|
284 |
+
"6": 0.76,
|
285 |
+
"9": 0.912
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.782,
|
289 |
+
"1": 0.835,
|
290 |
+
"2": 0.874,
|
291 |
+
"6": 0.941,
|
292 |
+
"9": 0.934
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9470000267028809,
|
298 |
+
"13": 0.9330000281333923,
|
299 |
+
"14": 0.9360000491142273,
|
300 |
+
"18": 0.8880000710487366,
|
301 |
+
"19": 0.9490000605583191
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9490000605583191,
|
305 |
+
"13": 0.9280000329017639,
|
306 |
+
"14": 0.9520000219345093,
|
307 |
+
"18": 0.8880000710487366,
|
308 |
+
"19": 0.9480000734329224
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.772,
|
312 |
+
"13": 0.61,
|
313 |
+
"14": 0.6,
|
314 |
+
"18": 0.603,
|
315 |
+
"19": 0.577
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.787,
|
319 |
+
"13": 0.619,
|
320 |
+
"14": 0.675,
|
321 |
+
"18": 0.711,
|
322 |
+
"19": 0.706
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.834,
|
326 |
+
"13": 0.798,
|
327 |
+
"14": 0.819,
|
328 |
+
"18": 0.747,
|
329 |
+
"19": 0.84
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.834,
|
333 |
+
"13": 0.62,
|
334 |
+
"14": 0.732,
|
335 |
+
"18": 0.635,
|
336 |
+
"19": 0.645
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.851,
|
340 |
+
"13": 0.721,
|
341 |
+
"14": 0.751,
|
342 |
+
"18": 0.675,
|
343 |
+
"19": 0.845
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.857,
|
347 |
+
"13": 0.753,
|
348 |
+
"14": 0.8,
|
349 |
+
"18": 0.773,
|
350 |
+
"19": 0.855
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9350000619888306,
|
356 |
+
"21": 0.8940000534057617,
|
357 |
+
"22": 0.9030000567436218,
|
358 |
+
"25": 0.940000057220459,
|
359 |
+
"26": 0.8700000643730164
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9440000653266907,
|
363 |
+
"21": 0.8950000405311584,
|
364 |
+
"22": 0.8970000147819519,
|
365 |
+
"25": 0.9480000734329224,
|
366 |
+
"26": 0.8720000386238098
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.77,
|
370 |
+
"21": 0.782,
|
371 |
+
"22": 0.496,
|
372 |
+
"25": 0.632,
|
373 |
+
"26": 0.564
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.82,
|
377 |
+
"21": 0.801,
|
378 |
+
"22": 0.64,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.598
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.849,
|
384 |
+
"21": 0.816,
|
385 |
+
"22": 0.706,
|
386 |
+
"25": 0.836,
|
387 |
+
"26": 0.741
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.812,
|
391 |
+
"21": 0.652,
|
392 |
+
"22": 0.692,
|
393 |
+
"25": 0.872,
|
394 |
+
"26": 0.596
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.837,
|
398 |
+
"21": 0.71,
|
399 |
+
"22": 0.806,
|
400 |
+
"25": 0.868,
|
401 |
+
"26": 0.652
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.846,
|
405 |
+
"21": 0.707,
|
406 |
+
"22": 0.841,
|
407 |
+
"25": 0.887,
|
408 |
+
"26": 0.764
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9240000247955322,
|
414 |
+
"2": 0.8980000615119934,
|
415 |
+
"3": 0.8720000386238098,
|
416 |
+
"5": 0.862000048160553,
|
417 |
+
"6": 0.8190000653266907
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9190000295639038,
|
421 |
+
"2": 0.8770000338554382,
|
422 |
+
"3": 0.8710000514984131,
|
423 |
+
"5": 0.859000027179718,
|
424 |
+
"6": 0.8080000281333923
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.685,
|
428 |
+
"2": 0.638,
|
429 |
+
"3": 0.641,
|
430 |
+
"5": 0.564,
|
431 |
+
"6": 0.678
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.762,
|
435 |
+
"2": 0.695,
|
436 |
+
"3": 0.654,
|
437 |
+
"5": 0.603,
|
438 |
+
"6": 0.721
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.804,
|
442 |
+
"2": 0.729,
|
443 |
+
"3": 0.725,
|
444 |
+
"5": 0.718,
|
445 |
+
"6": 0.714
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.789,
|
449 |
+
"2": 0.64,
|
450 |
+
"3": 0.66,
|
451 |
+
"5": 0.778,
|
452 |
+
"6": 0.666
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.776,
|
456 |
+
"2": 0.679,
|
457 |
+
"3": 0.658,
|
458 |
+
"5": 0.793,
|
459 |
+
"6": 0.704
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.82,
|
463 |
+
"2": 0.726,
|
464 |
+
"3": 0.717,
|
465 |
+
"5": 0.815,
|
466 |
+
"6": 0.721
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9020000696182251,
|
472 |
+
"5.0": 0.9030000567436218
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.8980000615119934,
|
476 |
+
"5.0": 0.8970000147819519
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.64,
|
480 |
+
"5.0": 0.64
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.717,
|
484 |
+
"5.0": 0.717
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.789,
|
488 |
+
"5.0": 0.789
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.575,
|
492 |
+
"5.0": 0.575
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.659,
|
496 |
+
"5.0": 0.659
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.7,
|
500 |
+
"5.0": 0.7
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9470000267028809,
|
506 |
+
"Python": 0.9800000190734863,
|
507 |
+
"HTML": 0.9810000658035278,
|
508 |
+
"Java": 0.9650000333786011,
|
509 |
+
"PHP": 0.9550000429153442
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9610000252723694,
|
513 |
+
"Python": 0.9780000448226929,
|
514 |
+
"HTML": 0.984000027179718,
|
515 |
+
"Java": 0.9450000524520874,
|
516 |
+
"PHP": 0.9520000219345093
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.588,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.923,
|
522 |
+
"Java": 0.642,
|
523 |
+
"PHP": 0.659
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.598,
|
527 |
+
"Python": 0.628,
|
528 |
+
"HTML": 0.926,
|
529 |
+
"Java": 0.657,
|
530 |
+
"PHP": 0.666
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.718,
|
534 |
+
"Python": 0.816,
|
535 |
+
"HTML": 0.938,
|
536 |
+
"Java": 0.757,
|
537 |
+
"PHP": 0.748
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.592,
|
541 |
+
"Python": 0.661,
|
542 |
+
"HTML": 0.771,
|
543 |
+
"Java": 0.608,
|
544 |
+
"PHP": 0.652
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.795,
|
548 |
+
"Python": 0.895,
|
549 |
+
"HTML": 0.826,
|
550 |
+
"Java": 0.729,
|
551 |
+
"PHP": 0.897
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.855,
|
555 |
+
"Python": 0.906,
|
556 |
+
"HTML": 0.93,
|
557 |
+
"Java": 0.89,
|
558 |
+
"PHP": 0.898
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9330000281333923,
|
564 |
+
"1": 0.9740000367164612,
|
565 |
+
"2": 0.9310000538825989,
|
566 |
+
"3": 0.9380000233650208
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9230000376701355,
|
570 |
+
"1": 0.971000075340271,
|
571 |
+
"2": 0.9190000295639038,
|
572 |
+
"3": 0.9350000619888306
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.782,
|
576 |
+
"1": 0.582,
|
577 |
+
"2": 0.762,
|
578 |
+
"3": 0.581
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.784,
|
582 |
+
"1": 0.612,
|
583 |
+
"2": 0.785,
|
584 |
+
"3": 0.671
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.847,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.813,
|
590 |
+
"3": 0.785
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.815,
|
594 |
+
"1": 0.948,
|
595 |
+
"2": 0.851,
|
596 |
+
"3": 0.627
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.823,
|
600 |
+
"1": 0.954,
|
601 |
+
"2": 0.847,
|
602 |
+
"3": 0.714
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.834,
|
606 |
+
"1": 0.958,
|
607 |
+
"2": 0.878,
|
608 |
+
"3": 0.819
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 1.0
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 0.9980000257492065,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.694,
|
628 |
+
"fr": 0.567,
|
629 |
+
"de": 0.808,
|
630 |
+
"es": 0.735,
|
631 |
+
"nl": 0.591
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.984,
|
635 |
+
"fr": 0.981,
|
636 |
+
"de": 0.813,
|
637 |
+
"es": 0.9,
|
638 |
+
"nl": 0.783
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.99,
|
642 |
+
"fr": 0.997,
|
643 |
+
"de": 0.998,
|
644 |
+
"es": 0.994,
|
645 |
+
"nl": 0.994
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.781,
|
649 |
+
"fr": 0.998,
|
650 |
+
"de": 1.0,
|
651 |
+
"es": 0.991,
|
652 |
+
"nl": 0.993
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.948,
|
656 |
+
"fr": 0.998,
|
657 |
+
"de": 0.999,
|
658 |
+
"es": 0.991,
|
659 |
+
"nl": 0.994
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.953,
|
663 |
+
"fr": 0.998,
|
664 |
+
"de": 0.999,
|
665 |
+
"es": 0.997,
|
666 |
+
"nl": 0.992
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_8_eval_results.json
ADDED
@@ -0,0 +1,670 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "2cc2d25e-f0c7-4132-ba87-981967df14a0",
|
30 |
+
"datetime_epoch_millis": 1737047439415,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9328125406056642,
|
34 |
+
"llm_top_1_test_accuracy": 0.65114375,
|
35 |
+
"llm_top_2_test_accuracy": 0.730825,
|
36 |
+
"llm_top_5_test_accuracy": 0.8192687500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9342812921851873,
|
44 |
+
"sae_top_1_test_accuracy": 0.74755,
|
45 |
+
"sae_top_2_test_accuracy": 0.80253125,
|
46 |
+
"sae_top_5_test_accuracy": 0.8575624999999999,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9536000490188599,
|
57 |
+
"llm_top_1_test_accuracy": 0.6024,
|
58 |
+
"llm_top_2_test_accuracy": 0.7208,
|
59 |
+
"llm_top_5_test_accuracy": 0.8072000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9522000312805176,
|
65 |
+
"sae_top_1_test_accuracy": 0.7812,
|
66 |
+
"sae_top_2_test_accuracy": 0.8244,
|
67 |
+
"sae_top_5_test_accuracy": 0.85,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9330000519752503,
|
76 |
+
"llm_top_1_test_accuracy": 0.6324,
|
77 |
+
"llm_top_2_test_accuracy": 0.6996,
|
78 |
+
"llm_top_5_test_accuracy": 0.8076000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9300000429153442,
|
84 |
+
"sae_top_1_test_accuracy": 0.723,
|
85 |
+
"sae_top_2_test_accuracy": 0.7692,
|
86 |
+
"sae_top_5_test_accuracy": 0.8108000000000001,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9112000465393066,
|
95 |
+
"llm_top_1_test_accuracy": 0.6488,
|
96 |
+
"llm_top_2_test_accuracy": 0.722,
|
97 |
+
"llm_top_5_test_accuracy": 0.7896,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.909600043296814,
|
103 |
+
"sae_top_1_test_accuracy": 0.7312000000000001,
|
104 |
+
"sae_top_2_test_accuracy": 0.7677999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.7978,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8668000340461731,
|
114 |
+
"llm_top_1_test_accuracy": 0.6412,
|
115 |
+
"llm_top_2_test_accuracy": 0.6869999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.738,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.8746000289916992,
|
122 |
+
"sae_top_1_test_accuracy": 0.7068,
|
123 |
+
"sae_top_2_test_accuracy": 0.7294,
|
124 |
+
"sae_top_5_test_accuracy": 0.7868,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.8975000381469727,
|
133 |
+
"llm_top_1_test_accuracy": 0.64,
|
134 |
+
"llm_top_2_test_accuracy": 0.717,
|
135 |
+
"llm_top_5_test_accuracy": 0.789,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.9015000462532043,
|
141 |
+
"sae_top_1_test_accuracy": 0.596,
|
142 |
+
"sae_top_2_test_accuracy": 0.682,
|
143 |
+
"sae_top_5_test_accuracy": 0.831,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9640000343322754,
|
152 |
+
"llm_top_1_test_accuracy": 0.6885999999999999,
|
153 |
+
"llm_top_2_test_accuracy": 0.6950000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7953999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9664000630378723,
|
160 |
+
"sae_top_1_test_accuracy": 0.7367999999999999,
|
161 |
+
"sae_top_2_test_accuracy": 0.8206,
|
162 |
+
"sae_top_5_test_accuracy": 0.9057999999999999,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9370000511407852,
|
171 |
+
"llm_top_1_test_accuracy": 0.67675,
|
172 |
+
"llm_top_2_test_accuracy": 0.7130000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8327500000000001,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9407500475645065,
|
179 |
+
"sae_top_1_test_accuracy": 0.781,
|
180 |
+
"sae_top_2_test_accuracy": 0.84025,
|
181 |
+
"sae_top_5_test_accuracy": 0.8805,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000196456909,
|
190 |
+
"llm_top_1_test_accuracy": 0.6789999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.8921999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9945999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9992000341415406,
|
198 |
+
"sae_top_1_test_accuracy": 0.9244,
|
199 |
+
"sae_top_2_test_accuracy": 0.9865999999999999,
|
200 |
+
"sae_top_5_test_accuracy": 0.9978,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_8",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "pythia-160m-deduped",
|
213 |
+
"d_in": 768,
|
214 |
+
"d_sae": 4096,
|
215 |
+
"hook_layer": 8,
|
216 |
+
"hook_name": "blocks.8.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "float32",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": 499998720,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9230000376701355,
|
240 |
+
"1": 0.9430000185966492,
|
241 |
+
"2": 0.9360000491142273,
|
242 |
+
"6": 0.9890000224113464,
|
243 |
+
"9": 0.9700000286102295
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9230000376701355,
|
247 |
+
"1": 0.9530000686645508,
|
248 |
+
"2": 0.937000036239624,
|
249 |
+
"6": 0.9860000610351562,
|
250 |
+
"9": 0.9690000414848328
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.496,
|
254 |
+
"1": 0.574,
|
255 |
+
"2": 0.754,
|
256 |
+
"6": 0.632,
|
257 |
+
"9": 0.556
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.686,
|
261 |
+
"1": 0.613,
|
262 |
+
"2": 0.763,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.742
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.734,
|
268 |
+
"1": 0.755,
|
269 |
+
"2": 0.812,
|
270 |
+
"6": 0.855,
|
271 |
+
"9": 0.88
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.792,
|
275 |
+
"1": 0.611,
|
276 |
+
"2": 0.853,
|
277 |
+
"6": 0.743,
|
278 |
+
"9": 0.907
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.764,
|
282 |
+
"1": 0.634,
|
283 |
+
"2": 0.871,
|
284 |
+
"6": 0.939,
|
285 |
+
"9": 0.914
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.789,
|
289 |
+
"1": 0.71,
|
290 |
+
"2": 0.883,
|
291 |
+
"6": 0.945,
|
292 |
+
"9": 0.923
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9500000476837158,
|
298 |
+
"13": 0.9360000491142273,
|
299 |
+
"14": 0.9360000491142273,
|
300 |
+
"18": 0.8760000467300415,
|
301 |
+
"19": 0.9520000219345093
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9490000605583191,
|
305 |
+
"13": 0.9280000329017639,
|
306 |
+
"14": 0.9520000219345093,
|
307 |
+
"18": 0.8880000710487366,
|
308 |
+
"19": 0.9480000734329224
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.772,
|
312 |
+
"13": 0.61,
|
313 |
+
"14": 0.6,
|
314 |
+
"18": 0.603,
|
315 |
+
"19": 0.577
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.787,
|
319 |
+
"13": 0.619,
|
320 |
+
"14": 0.675,
|
321 |
+
"18": 0.711,
|
322 |
+
"19": 0.706
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.834,
|
326 |
+
"13": 0.798,
|
327 |
+
"14": 0.819,
|
328 |
+
"18": 0.747,
|
329 |
+
"19": 0.84
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.841,
|
333 |
+
"13": 0.596,
|
334 |
+
"14": 0.646,
|
335 |
+
"18": 0.685,
|
336 |
+
"19": 0.847
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.832,
|
340 |
+
"13": 0.744,
|
341 |
+
"14": 0.742,
|
342 |
+
"18": 0.661,
|
343 |
+
"19": 0.867
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.866,
|
347 |
+
"13": 0.744,
|
348 |
+
"14": 0.781,
|
349 |
+
"18": 0.789,
|
350 |
+
"19": 0.874
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9460000395774841,
|
356 |
+
"21": 0.8860000371932983,
|
357 |
+
"22": 0.8950000405311584,
|
358 |
+
"25": 0.9510000348091125,
|
359 |
+
"26": 0.8700000643730164
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9440000653266907,
|
363 |
+
"21": 0.8950000405311584,
|
364 |
+
"22": 0.8970000147819519,
|
365 |
+
"25": 0.9480000734329224,
|
366 |
+
"26": 0.8720000386238098
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.77,
|
370 |
+
"21": 0.782,
|
371 |
+
"22": 0.496,
|
372 |
+
"25": 0.632,
|
373 |
+
"26": 0.564
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.82,
|
377 |
+
"21": 0.801,
|
378 |
+
"22": 0.64,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.598
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.849,
|
384 |
+
"21": 0.816,
|
385 |
+
"22": 0.706,
|
386 |
+
"25": 0.836,
|
387 |
+
"26": 0.741
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.808,
|
391 |
+
"21": 0.615,
|
392 |
+
"22": 0.7,
|
393 |
+
"25": 0.854,
|
394 |
+
"26": 0.679
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.842,
|
398 |
+
"21": 0.694,
|
399 |
+
"22": 0.759,
|
400 |
+
"25": 0.877,
|
401 |
+
"26": 0.667
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.874,
|
405 |
+
"21": 0.71,
|
406 |
+
"22": 0.751,
|
407 |
+
"25": 0.878,
|
408 |
+
"26": 0.776
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9160000681877136,
|
414 |
+
"2": 0.8970000147819519,
|
415 |
+
"3": 0.8680000305175781,
|
416 |
+
"5": 0.8740000128746033,
|
417 |
+
"6": 0.8180000185966492
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9190000295639038,
|
421 |
+
"2": 0.8770000338554382,
|
422 |
+
"3": 0.8710000514984131,
|
423 |
+
"5": 0.859000027179718,
|
424 |
+
"6": 0.8080000281333923
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.685,
|
428 |
+
"2": 0.638,
|
429 |
+
"3": 0.641,
|
430 |
+
"5": 0.564,
|
431 |
+
"6": 0.678
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.762,
|
435 |
+
"2": 0.695,
|
436 |
+
"3": 0.654,
|
437 |
+
"5": 0.603,
|
438 |
+
"6": 0.721
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.804,
|
442 |
+
"2": 0.729,
|
443 |
+
"3": 0.725,
|
444 |
+
"5": 0.718,
|
445 |
+
"6": 0.714
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.792,
|
449 |
+
"2": 0.643,
|
450 |
+
"3": 0.692,
|
451 |
+
"5": 0.784,
|
452 |
+
"6": 0.623
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.782,
|
456 |
+
"2": 0.669,
|
457 |
+
"3": 0.686,
|
458 |
+
"5": 0.808,
|
459 |
+
"6": 0.702
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.858,
|
463 |
+
"2": 0.8,
|
464 |
+
"3": 0.73,
|
465 |
+
"5": 0.817,
|
466 |
+
"6": 0.729
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9020000696182251,
|
472 |
+
"5.0": 0.9010000228881836
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.8980000615119934,
|
476 |
+
"5.0": 0.8970000147819519
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.64,
|
480 |
+
"5.0": 0.64
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.717,
|
484 |
+
"5.0": 0.717
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.789,
|
488 |
+
"5.0": 0.789
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.596,
|
492 |
+
"5.0": 0.596
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.682,
|
496 |
+
"5.0": 0.682
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.831,
|
500 |
+
"5.0": 0.831
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.9540000557899475,
|
506 |
+
"Python": 0.9780000448226929,
|
507 |
+
"HTML": 0.9850000739097595,
|
508 |
+
"Java": 0.9620000720024109,
|
509 |
+
"PHP": 0.9530000686645508
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9610000252723694,
|
513 |
+
"Python": 0.9780000448226929,
|
514 |
+
"HTML": 0.984000027179718,
|
515 |
+
"Java": 0.9450000524520874,
|
516 |
+
"PHP": 0.9520000219345093
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.588,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.923,
|
522 |
+
"Java": 0.642,
|
523 |
+
"PHP": 0.659
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.598,
|
527 |
+
"Python": 0.628,
|
528 |
+
"HTML": 0.926,
|
529 |
+
"Java": 0.657,
|
530 |
+
"PHP": 0.666
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.718,
|
534 |
+
"Python": 0.816,
|
535 |
+
"HTML": 0.938,
|
536 |
+
"Java": 0.757,
|
537 |
+
"PHP": 0.748
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.773,
|
541 |
+
"Python": 0.894,
|
542 |
+
"HTML": 0.632,
|
543 |
+
"Java": 0.727,
|
544 |
+
"PHP": 0.658
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.815,
|
548 |
+
"Python": 0.896,
|
549 |
+
"HTML": 0.765,
|
550 |
+
"Java": 0.734,
|
551 |
+
"PHP": 0.893
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.852,
|
555 |
+
"Python": 0.913,
|
556 |
+
"HTML": 0.958,
|
557 |
+
"Java": 0.907,
|
558 |
+
"PHP": 0.899
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9360000491142273,
|
564 |
+
"1": 0.9760000705718994,
|
565 |
+
"2": 0.9130000472068787,
|
566 |
+
"3": 0.9380000233650208
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9230000376701355,
|
570 |
+
"1": 0.971000075340271,
|
571 |
+
"2": 0.9190000295639038,
|
572 |
+
"3": 0.9350000619888306
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.782,
|
576 |
+
"1": 0.582,
|
577 |
+
"2": 0.762,
|
578 |
+
"3": 0.581
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.784,
|
582 |
+
"1": 0.612,
|
583 |
+
"2": 0.785,
|
584 |
+
"3": 0.671
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.847,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.813,
|
590 |
+
"3": 0.785
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.669,
|
594 |
+
"1": 0.96,
|
595 |
+
"2": 0.862,
|
596 |
+
"3": 0.633
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.844,
|
600 |
+
"1": 0.959,
|
601 |
+
"2": 0.865,
|
602 |
+
"3": 0.693
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.854,
|
606 |
+
"1": 0.961,
|
607 |
+
"2": 0.871,
|
608 |
+
"3": 0.836
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 0.999000072479248,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 0.999000072479248,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 0.9980000257492065
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 0.9980000257492065,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.694,
|
628 |
+
"fr": 0.567,
|
629 |
+
"de": 0.808,
|
630 |
+
"es": 0.735,
|
631 |
+
"nl": 0.591
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.984,
|
635 |
+
"fr": 0.981,
|
636 |
+
"de": 0.813,
|
637 |
+
"es": 0.9,
|
638 |
+
"nl": 0.783
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.99,
|
642 |
+
"fr": 0.997,
|
643 |
+
"de": 0.998,
|
644 |
+
"es": 0.994,
|
645 |
+
"nl": 0.994
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.639,
|
649 |
+
"fr": 0.998,
|
650 |
+
"de": 1.0,
|
651 |
+
"es": 0.991,
|
652 |
+
"nl": 0.994
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 0.947,
|
656 |
+
"fr": 0.998,
|
657 |
+
"de": 1.0,
|
658 |
+
"es": 0.993,
|
659 |
+
"nl": 0.995
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 1.0,
|
663 |
+
"fr": 0.998,
|
664 |
+
"de": 0.998,
|
665 |
+
"es": 0.998,
|
666 |
+
"nl": 0.995
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopK_pythia-160m-deduped__0108_resid_post_layer_8_trainer_9_eval_results.json
ADDED
@@ -0,0 +1,670 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 16,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5
|
26 |
+
],
|
27 |
+
"lower_vram_usage": false
|
28 |
+
},
|
29 |
+
"eval_id": "c7e816a4-c4b7-4d20-8202-d98e9bea9183",
|
30 |
+
"datetime_epoch_millis": 1737047251515,
|
31 |
+
"eval_result_metrics": {
|
32 |
+
"llm": {
|
33 |
+
"llm_test_accuracy": 0.9328125406056642,
|
34 |
+
"llm_top_1_test_accuracy": 0.65114375,
|
35 |
+
"llm_top_2_test_accuracy": 0.730825,
|
36 |
+
"llm_top_5_test_accuracy": 0.8192687500000001,
|
37 |
+
"llm_top_10_test_accuracy": null,
|
38 |
+
"llm_top_20_test_accuracy": null,
|
39 |
+
"llm_top_50_test_accuracy": null,
|
40 |
+
"llm_top_100_test_accuracy": null
|
41 |
+
},
|
42 |
+
"sae": {
|
43 |
+
"sae_test_accuracy": 0.9335500366985797,
|
44 |
+
"sae_top_1_test_accuracy": 0.772525,
|
45 |
+
"sae_top_2_test_accuracy": 0.82200625,
|
46 |
+
"sae_top_5_test_accuracy": 0.8540437500000001,
|
47 |
+
"sae_top_10_test_accuracy": null,
|
48 |
+
"sae_top_20_test_accuracy": null,
|
49 |
+
"sae_top_50_test_accuracy": null,
|
50 |
+
"sae_top_100_test_accuracy": null
|
51 |
+
}
|
52 |
+
},
|
53 |
+
"eval_result_details": [
|
54 |
+
{
|
55 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
56 |
+
"llm_test_accuracy": 0.9536000490188599,
|
57 |
+
"llm_top_1_test_accuracy": 0.6024,
|
58 |
+
"llm_top_2_test_accuracy": 0.7208,
|
59 |
+
"llm_top_5_test_accuracy": 0.8072000000000001,
|
60 |
+
"llm_top_10_test_accuracy": null,
|
61 |
+
"llm_top_20_test_accuracy": null,
|
62 |
+
"llm_top_50_test_accuracy": null,
|
63 |
+
"llm_top_100_test_accuracy": null,
|
64 |
+
"sae_test_accuracy": 0.9548000454902649,
|
65 |
+
"sae_top_1_test_accuracy": 0.7784000000000001,
|
66 |
+
"sae_top_2_test_accuracy": 0.8164,
|
67 |
+
"sae_top_5_test_accuracy": 0.8535999999999999,
|
68 |
+
"sae_top_10_test_accuracy": null,
|
69 |
+
"sae_top_20_test_accuracy": null,
|
70 |
+
"sae_top_50_test_accuracy": null,
|
71 |
+
"sae_top_100_test_accuracy": null
|
72 |
+
},
|
73 |
+
{
|
74 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
75 |
+
"llm_test_accuracy": 0.9330000519752503,
|
76 |
+
"llm_top_1_test_accuracy": 0.6324,
|
77 |
+
"llm_top_2_test_accuracy": 0.6996,
|
78 |
+
"llm_top_5_test_accuracy": 0.8076000000000001,
|
79 |
+
"llm_top_10_test_accuracy": null,
|
80 |
+
"llm_top_20_test_accuracy": null,
|
81 |
+
"llm_top_50_test_accuracy": null,
|
82 |
+
"llm_top_100_test_accuracy": null,
|
83 |
+
"sae_test_accuracy": 0.9320000410079956,
|
84 |
+
"sae_top_1_test_accuracy": 0.7628,
|
85 |
+
"sae_top_2_test_accuracy": 0.8103999999999999,
|
86 |
+
"sae_top_5_test_accuracy": 0.8282,
|
87 |
+
"sae_top_10_test_accuracy": null,
|
88 |
+
"sae_top_20_test_accuracy": null,
|
89 |
+
"sae_top_50_test_accuracy": null,
|
90 |
+
"sae_top_100_test_accuracy": null
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
94 |
+
"llm_test_accuracy": 0.9112000465393066,
|
95 |
+
"llm_top_1_test_accuracy": 0.6488,
|
96 |
+
"llm_top_2_test_accuracy": 0.722,
|
97 |
+
"llm_top_5_test_accuracy": 0.7896,
|
98 |
+
"llm_top_10_test_accuracy": null,
|
99 |
+
"llm_top_20_test_accuracy": null,
|
100 |
+
"llm_top_50_test_accuracy": null,
|
101 |
+
"llm_top_100_test_accuracy": null,
|
102 |
+
"sae_test_accuracy": 0.910200035572052,
|
103 |
+
"sae_top_1_test_accuracy": 0.7558,
|
104 |
+
"sae_top_2_test_accuracy": 0.7741999999999999,
|
105 |
+
"sae_top_5_test_accuracy": 0.8076000000000001,
|
106 |
+
"sae_top_10_test_accuracy": null,
|
107 |
+
"sae_top_20_test_accuracy": null,
|
108 |
+
"sae_top_50_test_accuracy": null,
|
109 |
+
"sae_top_100_test_accuracy": null
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
113 |
+
"llm_test_accuracy": 0.8668000340461731,
|
114 |
+
"llm_top_1_test_accuracy": 0.6412,
|
115 |
+
"llm_top_2_test_accuracy": 0.6869999999999999,
|
116 |
+
"llm_top_5_test_accuracy": 0.738,
|
117 |
+
"llm_top_10_test_accuracy": null,
|
118 |
+
"llm_top_20_test_accuracy": null,
|
119 |
+
"llm_top_50_test_accuracy": null,
|
120 |
+
"llm_top_100_test_accuracy": null,
|
121 |
+
"sae_test_accuracy": 0.8798000454902649,
|
122 |
+
"sae_top_1_test_accuracy": 0.7293999999999999,
|
123 |
+
"sae_top_2_test_accuracy": 0.7318,
|
124 |
+
"sae_top_5_test_accuracy": 0.7668,
|
125 |
+
"sae_top_10_test_accuracy": null,
|
126 |
+
"sae_top_20_test_accuracy": null,
|
127 |
+
"sae_top_50_test_accuracy": null,
|
128 |
+
"sae_top_100_test_accuracy": null
|
129 |
+
},
|
130 |
+
{
|
131 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
132 |
+
"llm_test_accuracy": 0.8975000381469727,
|
133 |
+
"llm_top_1_test_accuracy": 0.64,
|
134 |
+
"llm_top_2_test_accuracy": 0.717,
|
135 |
+
"llm_top_5_test_accuracy": 0.789,
|
136 |
+
"llm_top_10_test_accuracy": null,
|
137 |
+
"llm_top_20_test_accuracy": null,
|
138 |
+
"llm_top_50_test_accuracy": null,
|
139 |
+
"llm_top_100_test_accuracy": null,
|
140 |
+
"sae_test_accuracy": 0.8960000276565552,
|
141 |
+
"sae_top_1_test_accuracy": 0.579,
|
142 |
+
"sae_top_2_test_accuracy": 0.757,
|
143 |
+
"sae_top_5_test_accuracy": 0.841,
|
144 |
+
"sae_top_10_test_accuracy": null,
|
145 |
+
"sae_top_20_test_accuracy": null,
|
146 |
+
"sae_top_50_test_accuracy": null,
|
147 |
+
"sae_top_100_test_accuracy": null
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"dataset_name": "codeparrot/github-code_results",
|
151 |
+
"llm_test_accuracy": 0.9640000343322754,
|
152 |
+
"llm_top_1_test_accuracy": 0.6885999999999999,
|
153 |
+
"llm_top_2_test_accuracy": 0.6950000000000001,
|
154 |
+
"llm_top_5_test_accuracy": 0.7953999999999999,
|
155 |
+
"llm_top_10_test_accuracy": null,
|
156 |
+
"llm_top_20_test_accuracy": null,
|
157 |
+
"llm_top_50_test_accuracy": null,
|
158 |
+
"llm_top_100_test_accuracy": null,
|
159 |
+
"sae_test_accuracy": 0.9612000465393067,
|
160 |
+
"sae_top_1_test_accuracy": 0.7754000000000001,
|
161 |
+
"sae_top_2_test_accuracy": 0.8336,
|
162 |
+
"sae_top_5_test_accuracy": 0.8710000000000001,
|
163 |
+
"sae_top_10_test_accuracy": null,
|
164 |
+
"sae_top_20_test_accuracy": null,
|
165 |
+
"sae_top_50_test_accuracy": null,
|
166 |
+
"sae_top_100_test_accuracy": null
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
170 |
+
"llm_test_accuracy": 0.9370000511407852,
|
171 |
+
"llm_top_1_test_accuracy": 0.67675,
|
172 |
+
"llm_top_2_test_accuracy": 0.7130000000000001,
|
173 |
+
"llm_top_5_test_accuracy": 0.8327500000000001,
|
174 |
+
"llm_top_10_test_accuracy": null,
|
175 |
+
"llm_top_20_test_accuracy": null,
|
176 |
+
"llm_top_50_test_accuracy": null,
|
177 |
+
"llm_top_100_test_accuracy": null,
|
178 |
+
"sae_test_accuracy": 0.9350000321865082,
|
179 |
+
"sae_top_1_test_accuracy": 0.8240000000000001,
|
180 |
+
"sae_top_2_test_accuracy": 0.85425,
|
181 |
+
"sae_top_5_test_accuracy": 0.86575,
|
182 |
+
"sae_top_10_test_accuracy": null,
|
183 |
+
"sae_top_20_test_accuracy": null,
|
184 |
+
"sae_top_50_test_accuracy": null,
|
185 |
+
"sae_top_100_test_accuracy": null
|
186 |
+
},
|
187 |
+
{
|
188 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
189 |
+
"llm_test_accuracy": 0.9994000196456909,
|
190 |
+
"llm_top_1_test_accuracy": 0.6789999999999999,
|
191 |
+
"llm_top_2_test_accuracy": 0.8921999999999999,
|
192 |
+
"llm_top_5_test_accuracy": 0.9945999999999999,
|
193 |
+
"llm_top_10_test_accuracy": null,
|
194 |
+
"llm_top_20_test_accuracy": null,
|
195 |
+
"llm_top_50_test_accuracy": null,
|
196 |
+
"llm_top_100_test_accuracy": null,
|
197 |
+
"sae_test_accuracy": 0.9994000196456909,
|
198 |
+
"sae_top_1_test_accuracy": 0.9753999999999999,
|
199 |
+
"sae_top_2_test_accuracy": 0.9984,
|
200 |
+
"sae_top_5_test_accuracy": 0.9984,
|
201 |
+
"sae_top_10_test_accuracy": null,
|
202 |
+
"sae_top_20_test_accuracy": null,
|
203 |
+
"sae_top_50_test_accuracy": null,
|
204 |
+
"sae_top_100_test_accuracy": null
|
205 |
+
}
|
206 |
+
],
|
207 |
+
"sae_bench_commit_hash": "141aff72928f7588c1451bed47c401e1d565d471",
|
208 |
+
"sae_lens_id": "custom_sae",
|
209 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_BatchTopKTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_9",
|
210 |
+
"sae_lens_version": "5.3.1",
|
211 |
+
"sae_cfg_dict": {
|
212 |
+
"model_name": "pythia-160m-deduped",
|
213 |
+
"d_in": 768,
|
214 |
+
"d_sae": 4096,
|
215 |
+
"hook_layer": 8,
|
216 |
+
"hook_name": "blocks.8.hook_resid_post",
|
217 |
+
"context_size": null,
|
218 |
+
"hook_head_index": null,
|
219 |
+
"architecture": "batch_topk",
|
220 |
+
"apply_b_dec_to_input": null,
|
221 |
+
"finetuning_scaling_factor": null,
|
222 |
+
"activation_fn_str": "",
|
223 |
+
"prepend_bos": true,
|
224 |
+
"normalize_activations": "none",
|
225 |
+
"dtype": "float32",
|
226 |
+
"device": "",
|
227 |
+
"dataset_path": "",
|
228 |
+
"dataset_trust_remote_code": true,
|
229 |
+
"seqpos_slice": [
|
230 |
+
null
|
231 |
+
],
|
232 |
+
"training_tokens": 499998720,
|
233 |
+
"sae_lens_training_version": null,
|
234 |
+
"neuronpedia_id": null
|
235 |
+
},
|
236 |
+
"eval_result_unstructured": {
|
237 |
+
"LabHC/bias_in_bios_class_set1_results": {
|
238 |
+
"sae_test_accuracy": {
|
239 |
+
"0": 0.9290000200271606,
|
240 |
+
"1": 0.9440000653266907,
|
241 |
+
"2": 0.940000057220459,
|
242 |
+
"6": 0.9880000352859497,
|
243 |
+
"9": 0.9730000495910645
|
244 |
+
},
|
245 |
+
"llm_test_accuracy": {
|
246 |
+
"0": 0.9230000376701355,
|
247 |
+
"1": 0.9530000686645508,
|
248 |
+
"2": 0.937000036239624,
|
249 |
+
"6": 0.9860000610351562,
|
250 |
+
"9": 0.9690000414848328
|
251 |
+
},
|
252 |
+
"llm_top_1_test_accuracy": {
|
253 |
+
"0": 0.496,
|
254 |
+
"1": 0.574,
|
255 |
+
"2": 0.754,
|
256 |
+
"6": 0.632,
|
257 |
+
"9": 0.556
|
258 |
+
},
|
259 |
+
"llm_top_2_test_accuracy": {
|
260 |
+
"0": 0.686,
|
261 |
+
"1": 0.613,
|
262 |
+
"2": 0.763,
|
263 |
+
"6": 0.8,
|
264 |
+
"9": 0.742
|
265 |
+
},
|
266 |
+
"llm_top_5_test_accuracy": {
|
267 |
+
"0": 0.734,
|
268 |
+
"1": 0.755,
|
269 |
+
"2": 0.812,
|
270 |
+
"6": 0.855,
|
271 |
+
"9": 0.88
|
272 |
+
},
|
273 |
+
"sae_top_1_test_accuracy": {
|
274 |
+
"0": 0.779,
|
275 |
+
"1": 0.622,
|
276 |
+
"2": 0.621,
|
277 |
+
"6": 0.939,
|
278 |
+
"9": 0.931
|
279 |
+
},
|
280 |
+
"sae_top_2_test_accuracy": {
|
281 |
+
"0": 0.777,
|
282 |
+
"1": 0.712,
|
283 |
+
"2": 0.725,
|
284 |
+
"6": 0.94,
|
285 |
+
"9": 0.928
|
286 |
+
},
|
287 |
+
"sae_top_5_test_accuracy": {
|
288 |
+
"0": 0.787,
|
289 |
+
"1": 0.802,
|
290 |
+
"2": 0.807,
|
291 |
+
"6": 0.945,
|
292 |
+
"9": 0.927
|
293 |
+
}
|
294 |
+
},
|
295 |
+
"LabHC/bias_in_bios_class_set2_results": {
|
296 |
+
"sae_test_accuracy": {
|
297 |
+
"11": 0.9440000653266907,
|
298 |
+
"13": 0.9330000281333923,
|
299 |
+
"14": 0.937000036239624,
|
300 |
+
"18": 0.8890000581741333,
|
301 |
+
"19": 0.9570000171661377
|
302 |
+
},
|
303 |
+
"llm_test_accuracy": {
|
304 |
+
"11": 0.9490000605583191,
|
305 |
+
"13": 0.9280000329017639,
|
306 |
+
"14": 0.9520000219345093,
|
307 |
+
"18": 0.8880000710487366,
|
308 |
+
"19": 0.9480000734329224
|
309 |
+
},
|
310 |
+
"llm_top_1_test_accuracy": {
|
311 |
+
"11": 0.772,
|
312 |
+
"13": 0.61,
|
313 |
+
"14": 0.6,
|
314 |
+
"18": 0.603,
|
315 |
+
"19": 0.577
|
316 |
+
},
|
317 |
+
"llm_top_2_test_accuracy": {
|
318 |
+
"11": 0.787,
|
319 |
+
"13": 0.619,
|
320 |
+
"14": 0.675,
|
321 |
+
"18": 0.711,
|
322 |
+
"19": 0.706
|
323 |
+
},
|
324 |
+
"llm_top_5_test_accuracy": {
|
325 |
+
"11": 0.834,
|
326 |
+
"13": 0.798,
|
327 |
+
"14": 0.819,
|
328 |
+
"18": 0.747,
|
329 |
+
"19": 0.84
|
330 |
+
},
|
331 |
+
"sae_top_1_test_accuracy": {
|
332 |
+
"11": 0.709,
|
333 |
+
"13": 0.811,
|
334 |
+
"14": 0.791,
|
335 |
+
"18": 0.655,
|
336 |
+
"19": 0.848
|
337 |
+
},
|
338 |
+
"sae_top_2_test_accuracy": {
|
339 |
+
"11": 0.846,
|
340 |
+
"13": 0.827,
|
341 |
+
"14": 0.779,
|
342 |
+
"18": 0.742,
|
343 |
+
"19": 0.858
|
344 |
+
},
|
345 |
+
"sae_top_5_test_accuracy": {
|
346 |
+
"11": 0.896,
|
347 |
+
"13": 0.81,
|
348 |
+
"14": 0.798,
|
349 |
+
"18": 0.774,
|
350 |
+
"19": 0.863
|
351 |
+
}
|
352 |
+
},
|
353 |
+
"LabHC/bias_in_bios_class_set3_results": {
|
354 |
+
"sae_test_accuracy": {
|
355 |
+
"20": 0.9440000653266907,
|
356 |
+
"21": 0.8870000243186951,
|
357 |
+
"22": 0.8960000276565552,
|
358 |
+
"25": 0.9510000348091125,
|
359 |
+
"26": 0.8730000257492065
|
360 |
+
},
|
361 |
+
"llm_test_accuracy": {
|
362 |
+
"20": 0.9440000653266907,
|
363 |
+
"21": 0.8950000405311584,
|
364 |
+
"22": 0.8970000147819519,
|
365 |
+
"25": 0.9480000734329224,
|
366 |
+
"26": 0.8720000386238098
|
367 |
+
},
|
368 |
+
"llm_top_1_test_accuracy": {
|
369 |
+
"20": 0.77,
|
370 |
+
"21": 0.782,
|
371 |
+
"22": 0.496,
|
372 |
+
"25": 0.632,
|
373 |
+
"26": 0.564
|
374 |
+
},
|
375 |
+
"llm_top_2_test_accuracy": {
|
376 |
+
"20": 0.82,
|
377 |
+
"21": 0.801,
|
378 |
+
"22": 0.64,
|
379 |
+
"25": 0.751,
|
380 |
+
"26": 0.598
|
381 |
+
},
|
382 |
+
"llm_top_5_test_accuracy": {
|
383 |
+
"20": 0.849,
|
384 |
+
"21": 0.816,
|
385 |
+
"22": 0.706,
|
386 |
+
"25": 0.836,
|
387 |
+
"26": 0.741
|
388 |
+
},
|
389 |
+
"sae_top_1_test_accuracy": {
|
390 |
+
"20": 0.861,
|
391 |
+
"21": 0.686,
|
392 |
+
"22": 0.709,
|
393 |
+
"25": 0.836,
|
394 |
+
"26": 0.687
|
395 |
+
},
|
396 |
+
"sae_top_2_test_accuracy": {
|
397 |
+
"20": 0.882,
|
398 |
+
"21": 0.698,
|
399 |
+
"22": 0.696,
|
400 |
+
"25": 0.836,
|
401 |
+
"26": 0.759
|
402 |
+
},
|
403 |
+
"sae_top_5_test_accuracy": {
|
404 |
+
"20": 0.881,
|
405 |
+
"21": 0.796,
|
406 |
+
"22": 0.716,
|
407 |
+
"25": 0.866,
|
408 |
+
"26": 0.779
|
409 |
+
}
|
410 |
+
},
|
411 |
+
"canrager/amazon_reviews_mcauley_1and5_results": {
|
412 |
+
"sae_test_accuracy": {
|
413 |
+
"1": 0.9300000667572021,
|
414 |
+
"2": 0.8960000276565552,
|
415 |
+
"3": 0.8820000290870667,
|
416 |
+
"5": 0.8750000596046448,
|
417 |
+
"6": 0.8160000443458557
|
418 |
+
},
|
419 |
+
"llm_test_accuracy": {
|
420 |
+
"1": 0.9190000295639038,
|
421 |
+
"2": 0.8770000338554382,
|
422 |
+
"3": 0.8710000514984131,
|
423 |
+
"5": 0.859000027179718,
|
424 |
+
"6": 0.8080000281333923
|
425 |
+
},
|
426 |
+
"llm_top_1_test_accuracy": {
|
427 |
+
"1": 0.685,
|
428 |
+
"2": 0.638,
|
429 |
+
"3": 0.641,
|
430 |
+
"5": 0.564,
|
431 |
+
"6": 0.678
|
432 |
+
},
|
433 |
+
"llm_top_2_test_accuracy": {
|
434 |
+
"1": 0.762,
|
435 |
+
"2": 0.695,
|
436 |
+
"3": 0.654,
|
437 |
+
"5": 0.603,
|
438 |
+
"6": 0.721
|
439 |
+
},
|
440 |
+
"llm_top_5_test_accuracy": {
|
441 |
+
"1": 0.804,
|
442 |
+
"2": 0.729,
|
443 |
+
"3": 0.725,
|
444 |
+
"5": 0.718,
|
445 |
+
"6": 0.714
|
446 |
+
},
|
447 |
+
"sae_top_1_test_accuracy": {
|
448 |
+
"1": 0.788,
|
449 |
+
"2": 0.642,
|
450 |
+
"3": 0.71,
|
451 |
+
"5": 0.804,
|
452 |
+
"6": 0.703
|
453 |
+
},
|
454 |
+
"sae_top_2_test_accuracy": {
|
455 |
+
"1": 0.79,
|
456 |
+
"2": 0.652,
|
457 |
+
"3": 0.712,
|
458 |
+
"5": 0.8,
|
459 |
+
"6": 0.705
|
460 |
+
},
|
461 |
+
"sae_top_5_test_accuracy": {
|
462 |
+
"1": 0.827,
|
463 |
+
"2": 0.703,
|
464 |
+
"3": 0.762,
|
465 |
+
"5": 0.812,
|
466 |
+
"6": 0.73
|
467 |
+
}
|
468 |
+
},
|
469 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment_results": {
|
470 |
+
"sae_test_accuracy": {
|
471 |
+
"1.0": 0.9010000228881836,
|
472 |
+
"5.0": 0.8910000324249268
|
473 |
+
},
|
474 |
+
"llm_test_accuracy": {
|
475 |
+
"1.0": 0.8980000615119934,
|
476 |
+
"5.0": 0.8970000147819519
|
477 |
+
},
|
478 |
+
"llm_top_1_test_accuracy": {
|
479 |
+
"1.0": 0.64,
|
480 |
+
"5.0": 0.64
|
481 |
+
},
|
482 |
+
"llm_top_2_test_accuracy": {
|
483 |
+
"1.0": 0.717,
|
484 |
+
"5.0": 0.717
|
485 |
+
},
|
486 |
+
"llm_top_5_test_accuracy": {
|
487 |
+
"1.0": 0.789,
|
488 |
+
"5.0": 0.789
|
489 |
+
},
|
490 |
+
"sae_top_1_test_accuracy": {
|
491 |
+
"1.0": 0.579,
|
492 |
+
"5.0": 0.579
|
493 |
+
},
|
494 |
+
"sae_top_2_test_accuracy": {
|
495 |
+
"1.0": 0.757,
|
496 |
+
"5.0": 0.757
|
497 |
+
},
|
498 |
+
"sae_top_5_test_accuracy": {
|
499 |
+
"1.0": 0.841,
|
500 |
+
"5.0": 0.841
|
501 |
+
}
|
502 |
+
},
|
503 |
+
"codeparrot/github-code_results": {
|
504 |
+
"sae_test_accuracy": {
|
505 |
+
"C": 0.940000057220459,
|
506 |
+
"Python": 0.9810000658035278,
|
507 |
+
"HTML": 0.9800000190734863,
|
508 |
+
"Java": 0.9510000348091125,
|
509 |
+
"PHP": 0.9540000557899475
|
510 |
+
},
|
511 |
+
"llm_test_accuracy": {
|
512 |
+
"C": 0.9610000252723694,
|
513 |
+
"Python": 0.9780000448226929,
|
514 |
+
"HTML": 0.984000027179718,
|
515 |
+
"Java": 0.9450000524520874,
|
516 |
+
"PHP": 0.9520000219345093
|
517 |
+
},
|
518 |
+
"llm_top_1_test_accuracy": {
|
519 |
+
"C": 0.588,
|
520 |
+
"Python": 0.631,
|
521 |
+
"HTML": 0.923,
|
522 |
+
"Java": 0.642,
|
523 |
+
"PHP": 0.659
|
524 |
+
},
|
525 |
+
"llm_top_2_test_accuracy": {
|
526 |
+
"C": 0.598,
|
527 |
+
"Python": 0.628,
|
528 |
+
"HTML": 0.926,
|
529 |
+
"Java": 0.657,
|
530 |
+
"PHP": 0.666
|
531 |
+
},
|
532 |
+
"llm_top_5_test_accuracy": {
|
533 |
+
"C": 0.718,
|
534 |
+
"Python": 0.816,
|
535 |
+
"HTML": 0.938,
|
536 |
+
"Java": 0.757,
|
537 |
+
"PHP": 0.748
|
538 |
+
},
|
539 |
+
"sae_top_1_test_accuracy": {
|
540 |
+
"C": 0.734,
|
541 |
+
"Python": 0.891,
|
542 |
+
"HTML": 0.761,
|
543 |
+
"Java": 0.604,
|
544 |
+
"PHP": 0.887
|
545 |
+
},
|
546 |
+
"sae_top_2_test_accuracy": {
|
547 |
+
"C": 0.834,
|
548 |
+
"Python": 0.896,
|
549 |
+
"HTML": 0.818,
|
550 |
+
"Java": 0.741,
|
551 |
+
"PHP": 0.879
|
552 |
+
},
|
553 |
+
"sae_top_5_test_accuracy": {
|
554 |
+
"C": 0.85,
|
555 |
+
"Python": 0.911,
|
556 |
+
"HTML": 0.945,
|
557 |
+
"Java": 0.754,
|
558 |
+
"PHP": 0.895
|
559 |
+
}
|
560 |
+
},
|
561 |
+
"fancyzhx/ag_news_results": {
|
562 |
+
"sae_test_accuracy": {
|
563 |
+
"0": 0.9190000295639038,
|
564 |
+
"1": 0.9730000495910645,
|
565 |
+
"2": 0.9140000343322754,
|
566 |
+
"3": 0.9340000152587891
|
567 |
+
},
|
568 |
+
"llm_test_accuracy": {
|
569 |
+
"0": 0.9230000376701355,
|
570 |
+
"1": 0.971000075340271,
|
571 |
+
"2": 0.9190000295639038,
|
572 |
+
"3": 0.9350000619888306
|
573 |
+
},
|
574 |
+
"llm_top_1_test_accuracy": {
|
575 |
+
"0": 0.782,
|
576 |
+
"1": 0.582,
|
577 |
+
"2": 0.762,
|
578 |
+
"3": 0.581
|
579 |
+
},
|
580 |
+
"llm_top_2_test_accuracy": {
|
581 |
+
"0": 0.784,
|
582 |
+
"1": 0.612,
|
583 |
+
"2": 0.785,
|
584 |
+
"3": 0.671
|
585 |
+
},
|
586 |
+
"llm_top_5_test_accuracy": {
|
587 |
+
"0": 0.847,
|
588 |
+
"1": 0.886,
|
589 |
+
"2": 0.813,
|
590 |
+
"3": 0.785
|
591 |
+
},
|
592 |
+
"sae_top_1_test_accuracy": {
|
593 |
+
"0": 0.806,
|
594 |
+
"1": 0.962,
|
595 |
+
"2": 0.86,
|
596 |
+
"3": 0.668
|
597 |
+
},
|
598 |
+
"sae_top_2_test_accuracy": {
|
599 |
+
"0": 0.838,
|
600 |
+
"1": 0.96,
|
601 |
+
"2": 0.855,
|
602 |
+
"3": 0.764
|
603 |
+
},
|
604 |
+
"sae_top_5_test_accuracy": {
|
605 |
+
"0": 0.873,
|
606 |
+
"1": 0.959,
|
607 |
+
"2": 0.867,
|
608 |
+
"3": 0.764
|
609 |
+
}
|
610 |
+
},
|
611 |
+
"Helsinki-NLP/europarl_results": {
|
612 |
+
"sae_test_accuracy": {
|
613 |
+
"en": 1.0,
|
614 |
+
"fr": 1.0,
|
615 |
+
"de": 0.9980000257492065,
|
616 |
+
"es": 1.0,
|
617 |
+
"nl": 0.999000072479248
|
618 |
+
},
|
619 |
+
"llm_test_accuracy": {
|
620 |
+
"en": 1.0,
|
621 |
+
"fr": 1.0,
|
622 |
+
"de": 0.9980000257492065,
|
623 |
+
"es": 0.999000072479248,
|
624 |
+
"nl": 1.0
|
625 |
+
},
|
626 |
+
"llm_top_1_test_accuracy": {
|
627 |
+
"en": 0.694,
|
628 |
+
"fr": 0.567,
|
629 |
+
"de": 0.808,
|
630 |
+
"es": 0.735,
|
631 |
+
"nl": 0.591
|
632 |
+
},
|
633 |
+
"llm_top_2_test_accuracy": {
|
634 |
+
"en": 0.984,
|
635 |
+
"fr": 0.981,
|
636 |
+
"de": 0.813,
|
637 |
+
"es": 0.9,
|
638 |
+
"nl": 0.783
|
639 |
+
},
|
640 |
+
"llm_top_5_test_accuracy": {
|
641 |
+
"en": 0.99,
|
642 |
+
"fr": 0.997,
|
643 |
+
"de": 0.998,
|
644 |
+
"es": 0.994,
|
645 |
+
"nl": 0.994
|
646 |
+
},
|
647 |
+
"sae_top_1_test_accuracy": {
|
648 |
+
"en": 0.982,
|
649 |
+
"fr": 0.998,
|
650 |
+
"de": 0.999,
|
651 |
+
"es": 0.997,
|
652 |
+
"nl": 0.901
|
653 |
+
},
|
654 |
+
"sae_top_2_test_accuracy": {
|
655 |
+
"en": 1.0,
|
656 |
+
"fr": 0.998,
|
657 |
+
"de": 1.0,
|
658 |
+
"es": 0.997,
|
659 |
+
"nl": 0.997
|
660 |
+
},
|
661 |
+
"sae_top_5_test_accuracy": {
|
662 |
+
"en": 0.999,
|
663 |
+
"fr": 0.998,
|
664 |
+
"de": 1.0,
|
665 |
+
"es": 0.998,
|
666 |
+
"nl": 0.997
|
667 |
+
}
|
668 |
+
}
|
669 |
+
}
|
670 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_12_eval_results.json
ADDED
@@ -0,0 +1,240 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5,
|
26 |
+
10,
|
27 |
+
20,
|
28 |
+
50
|
29 |
+
],
|
30 |
+
"lower_vram_usage": false
|
31 |
+
},
|
32 |
+
"eval_id": "0a736a45-ecd0-4ce1-a9c1-5183fdd78e85",
|
33 |
+
"datetime_epoch_millis": 1736493330745,
|
34 |
+
"eval_result_metrics": {
|
35 |
+
"llm": {
|
36 |
+
"llm_test_accuracy": 0.9292125,
|
37 |
+
"llm_top_1_test_accuracy": 0.6425625,
|
38 |
+
"llm_top_2_test_accuracy": 0.7349125000000001,
|
39 |
+
"llm_top_5_test_accuracy": 0.8187187499999999,
|
40 |
+
"llm_top_10_test_accuracy": 0.8505625000000001,
|
41 |
+
"llm_top_20_test_accuracy": 0.8723249999999999,
|
42 |
+
"llm_top_50_test_accuracy": 0.9006,
|
43 |
+
"llm_top_100_test_accuracy": null
|
44 |
+
},
|
45 |
+
"sae": {
|
46 |
+
"sae_test_accuracy": 0.9312875386327505,
|
47 |
+
"sae_top_1_test_accuracy": 0.7658500000000001,
|
48 |
+
"sae_top_2_test_accuracy": 0.8047500000000001,
|
49 |
+
"sae_top_5_test_accuracy": 0.84899375,
|
50 |
+
"sae_top_10_test_accuracy": 0.8715437500000001,
|
51 |
+
"sae_top_20_test_accuracy": 0.88605,
|
52 |
+
"sae_top_50_test_accuracy": 0.90546875,
|
53 |
+
"sae_top_100_test_accuracy": null
|
54 |
+
}
|
55 |
+
},
|
56 |
+
"eval_result_details": [
|
57 |
+
{
|
58 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
59 |
+
"llm_test_accuracy": 0.9512,
|
60 |
+
"llm_top_1_test_accuracy": 0.6045999999999999,
|
61 |
+
"llm_top_2_test_accuracy": 0.74,
|
62 |
+
"llm_top_5_test_accuracy": 0.7994,
|
63 |
+
"llm_top_10_test_accuracy": 0.8628,
|
64 |
+
"llm_top_20_test_accuracy": 0.8812000000000001,
|
65 |
+
"llm_top_50_test_accuracy": 0.921,
|
66 |
+
"llm_top_100_test_accuracy": null,
|
67 |
+
"sae_test_accuracy": 0.9500000357627869,
|
68 |
+
"sae_top_1_test_accuracy": 0.7854000000000001,
|
69 |
+
"sae_top_2_test_accuracy": 0.8386000000000001,
|
70 |
+
"sae_top_5_test_accuracy": 0.8539999999999999,
|
71 |
+
"sae_top_10_test_accuracy": 0.884,
|
72 |
+
"sae_top_20_test_accuracy": 0.8968,
|
73 |
+
"sae_top_50_test_accuracy": 0.9202,
|
74 |
+
"sae_top_100_test_accuracy": null
|
75 |
+
},
|
76 |
+
{
|
77 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
78 |
+
"llm_test_accuracy": 0.9308,
|
79 |
+
"llm_top_1_test_accuracy": 0.5818,
|
80 |
+
"llm_top_2_test_accuracy": 0.7078,
|
81 |
+
"llm_top_5_test_accuracy": 0.8074,
|
82 |
+
"llm_top_10_test_accuracy": 0.8343999999999999,
|
83 |
+
"llm_top_20_test_accuracy": 0.8556000000000001,
|
84 |
+
"llm_top_50_test_accuracy": 0.8896000000000001,
|
85 |
+
"llm_top_100_test_accuracy": null,
|
86 |
+
"sae_test_accuracy": 0.9314000368118286,
|
87 |
+
"sae_top_1_test_accuracy": 0.7616000000000002,
|
88 |
+
"sae_top_2_test_accuracy": 0.7754,
|
89 |
+
"sae_top_5_test_accuracy": 0.8,
|
90 |
+
"sae_top_10_test_accuracy": 0.8423999999999999,
|
91 |
+
"sae_top_20_test_accuracy": 0.8636000000000001,
|
92 |
+
"sae_top_50_test_accuracy": 0.8977999999999999,
|
93 |
+
"sae_top_100_test_accuracy": null
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
97 |
+
"llm_test_accuracy": 0.9016,
|
98 |
+
"llm_top_1_test_accuracy": 0.6472,
|
99 |
+
"llm_top_2_test_accuracy": 0.7143999999999999,
|
100 |
+
"llm_top_5_test_accuracy": 0.7936,
|
101 |
+
"llm_top_10_test_accuracy": 0.8148,
|
102 |
+
"llm_top_20_test_accuracy": 0.8454,
|
103 |
+
"llm_top_50_test_accuracy": 0.8714000000000001,
|
104 |
+
"llm_top_100_test_accuracy": null,
|
105 |
+
"sae_test_accuracy": 0.9112000465393066,
|
106 |
+
"sae_top_1_test_accuracy": 0.77,
|
107 |
+
"sae_top_2_test_accuracy": 0.7670000000000001,
|
108 |
+
"sae_top_5_test_accuracy": 0.8044,
|
109 |
+
"sae_top_10_test_accuracy": 0.834,
|
110 |
+
"sae_top_20_test_accuracy": 0.8684,
|
111 |
+
"sae_top_50_test_accuracy": 0.8691999999999999,
|
112 |
+
"sae_top_100_test_accuracy": null
|
113 |
+
},
|
114 |
+
{
|
115 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
116 |
+
"llm_test_accuracy": 0.8695999999999999,
|
117 |
+
"llm_top_1_test_accuracy": 0.617,
|
118 |
+
"llm_top_2_test_accuracy": 0.6884,
|
119 |
+
"llm_top_5_test_accuracy": 0.738,
|
120 |
+
"llm_top_10_test_accuracy": 0.7702,
|
121 |
+
"llm_top_20_test_accuracy": 0.7857999999999999,
|
122 |
+
"llm_top_50_test_accuracy": 0.826,
|
123 |
+
"llm_top_100_test_accuracy": null,
|
124 |
+
"sae_test_accuracy": 0.8718000411987304,
|
125 |
+
"sae_top_1_test_accuracy": 0.7184,
|
126 |
+
"sae_top_2_test_accuracy": 0.7638,
|
127 |
+
"sae_top_5_test_accuracy": 0.7954000000000001,
|
128 |
+
"sae_top_10_test_accuracy": 0.8064,
|
129 |
+
"sae_top_20_test_accuracy": 0.8198000000000001,
|
130 |
+
"sae_top_50_test_accuracy": 0.8347999999999999,
|
131 |
+
"sae_top_100_test_accuracy": null
|
132 |
+
},
|
133 |
+
{
|
134 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
135 |
+
"llm_test_accuracy": 0.8915,
|
136 |
+
"llm_top_1_test_accuracy": 0.64,
|
137 |
+
"llm_top_2_test_accuracy": 0.717,
|
138 |
+
"llm_top_5_test_accuracy": 0.789,
|
139 |
+
"llm_top_10_test_accuracy": 0.787,
|
140 |
+
"llm_top_20_test_accuracy": 0.813,
|
141 |
+
"llm_top_50_test_accuracy": 0.856,
|
142 |
+
"llm_top_100_test_accuracy": null,
|
143 |
+
"sae_test_accuracy": 0.89000004529953,
|
144 |
+
"sae_top_1_test_accuracy": 0.764,
|
145 |
+
"sae_top_2_test_accuracy": 0.765,
|
146 |
+
"sae_top_5_test_accuracy": 0.798,
|
147 |
+
"sae_top_10_test_accuracy": 0.794,
|
148 |
+
"sae_top_20_test_accuracy": 0.807,
|
149 |
+
"sae_top_50_test_accuracy": 0.863,
|
150 |
+
"sae_top_100_test_accuracy": null
|
151 |
+
},
|
152 |
+
{
|
153 |
+
"dataset_name": "codeparrot/github-code_results",
|
154 |
+
"llm_test_accuracy": 0.9621999999999999,
|
155 |
+
"llm_top_1_test_accuracy": 0.6902,
|
156 |
+
"llm_top_2_test_accuracy": 0.7020000000000001,
|
157 |
+
"llm_top_5_test_accuracy": 0.7926,
|
158 |
+
"llm_top_10_test_accuracy": 0.8710000000000001,
|
159 |
+
"llm_top_20_test_accuracy": 0.9044000000000001,
|
160 |
+
"llm_top_50_test_accuracy": 0.9266,
|
161 |
+
"llm_top_100_test_accuracy": null,
|
162 |
+
"sae_test_accuracy": 0.9598000407218933,
|
163 |
+
"sae_top_1_test_accuracy": 0.7376000000000001,
|
164 |
+
"sae_top_2_test_accuracy": 0.7806,
|
165 |
+
"sae_top_5_test_accuracy": 0.8583999999999999,
|
166 |
+
"sae_top_10_test_accuracy": 0.9112,
|
167 |
+
"sae_top_20_test_accuracy": 0.9245999999999999,
|
168 |
+
"sae_top_50_test_accuracy": 0.9404,
|
169 |
+
"sae_top_100_test_accuracy": null
|
170 |
+
},
|
171 |
+
{
|
172 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
173 |
+
"llm_test_accuracy": 0.927,
|
174 |
+
"llm_top_1_test_accuracy": 0.6825000000000001,
|
175 |
+
"llm_top_2_test_accuracy": 0.7164999999999999,
|
176 |
+
"llm_top_5_test_accuracy": 0.83575,
|
177 |
+
"llm_top_10_test_accuracy": 0.8674999999999999,
|
178 |
+
"llm_top_20_test_accuracy": 0.895,
|
179 |
+
"llm_top_50_test_accuracy": 0.9149999999999999,
|
180 |
+
"llm_top_100_test_accuracy": null,
|
181 |
+
"sae_test_accuracy": 0.9365000575780869,
|
182 |
+
"sae_top_1_test_accuracy": 0.8039999999999999,
|
183 |
+
"sae_top_2_test_accuracy": 0.8300000000000001,
|
184 |
+
"sae_top_5_test_accuracy": 0.88375,
|
185 |
+
"sae_top_10_test_accuracy": 0.9027499999999999,
|
186 |
+
"sae_top_20_test_accuracy": 0.91,
|
187 |
+
"sae_top_50_test_accuracy": 0.9197500000000001,
|
188 |
+
"sae_top_100_test_accuracy": null
|
189 |
+
},
|
190 |
+
{
|
191 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
192 |
+
"llm_test_accuracy": 0.9998000000000001,
|
193 |
+
"llm_top_1_test_accuracy": 0.6772,
|
194 |
+
"llm_top_2_test_accuracy": 0.8932,
|
195 |
+
"llm_top_5_test_accuracy": 0.994,
|
196 |
+
"llm_top_10_test_accuracy": 0.9968,
|
197 |
+
"llm_top_20_test_accuracy": 0.9982,
|
198 |
+
"llm_top_50_test_accuracy": 0.9992000000000001,
|
199 |
+
"llm_top_100_test_accuracy": null,
|
200 |
+
"sae_test_accuracy": 0.9996000051498413,
|
201 |
+
"sae_top_1_test_accuracy": 0.7858,
|
202 |
+
"sae_top_2_test_accuracy": 0.9176,
|
203 |
+
"sae_top_5_test_accuracy": 0.998,
|
204 |
+
"sae_top_10_test_accuracy": 0.9976,
|
205 |
+
"sae_top_20_test_accuracy": 0.9982,
|
206 |
+
"sae_top_50_test_accuracy": 0.9986,
|
207 |
+
"sae_top_100_test_accuracy": null
|
208 |
+
}
|
209 |
+
],
|
210 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
211 |
+
"sae_lens_id": "custom_sae",
|
212 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_12",
|
213 |
+
"sae_lens_version": "5.3.0",
|
214 |
+
"sae_cfg_dict": {
|
215 |
+
"model_name": "pythia-160m-deduped",
|
216 |
+
"d_in": 768,
|
217 |
+
"d_sae": 4096,
|
218 |
+
"hook_layer": 8,
|
219 |
+
"hook_name": "blocks.8.hook_resid_post",
|
220 |
+
"context_size": null,
|
221 |
+
"hook_head_index": null,
|
222 |
+
"architecture": "gated",
|
223 |
+
"apply_b_dec_to_input": null,
|
224 |
+
"finetuning_scaling_factor": null,
|
225 |
+
"activation_fn_str": "",
|
226 |
+
"prepend_bos": true,
|
227 |
+
"normalize_activations": "none",
|
228 |
+
"dtype": "float32",
|
229 |
+
"device": "",
|
230 |
+
"dataset_path": "",
|
231 |
+
"dataset_trust_remote_code": true,
|
232 |
+
"seqpos_slice": [
|
233 |
+
null
|
234 |
+
],
|
235 |
+
"training_tokens": 499998720,
|
236 |
+
"sae_lens_training_version": null,
|
237 |
+
"neuronpedia_id": null
|
238 |
+
},
|
239 |
+
"eval_result_unstructured": null
|
240 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_13_eval_results.json
ADDED
@@ -0,0 +1,240 @@
1 |
+
{
|
2 |
+
"eval_type_id": "sparse_probing",
|
3 |
+
"eval_config": {
|
4 |
+
"random_seed": 42,
|
5 |
+
"dataset_names": [
|
6 |
+
"LabHC/bias_in_bios_class_set1",
|
7 |
+
"LabHC/bias_in_bios_class_set2",
|
8 |
+
"LabHC/bias_in_bios_class_set3",
|
9 |
+
"canrager/amazon_reviews_mcauley_1and5",
|
10 |
+
"canrager/amazon_reviews_mcauley_1and5_sentiment",
|
11 |
+
"codeparrot/github-code",
|
12 |
+
"fancyzhx/ag_news",
|
13 |
+
"Helsinki-NLP/europarl"
|
14 |
+
],
|
15 |
+
"probe_train_set_size": 4000,
|
16 |
+
"probe_test_set_size": 1000,
|
17 |
+
"context_length": 128,
|
18 |
+
"sae_batch_size": 125,
|
19 |
+
"llm_batch_size": 256,
|
20 |
+
"llm_dtype": "float32",
|
21 |
+
"model_name": "pythia-160m-deduped",
|
22 |
+
"k_values": [
|
23 |
+
1,
|
24 |
+
2,
|
25 |
+
5,
|
26 |
+
10,
|
27 |
+
20,
|
28 |
+
50
|
29 |
+
],
|
30 |
+
"lower_vram_usage": false
|
31 |
+
},
|
32 |
+
"eval_id": "ed1c94c7-3ff3-46fa-8e76-5a2f274d0539",
|
33 |
+
"datetime_epoch_millis": 1736493877638,
|
34 |
+
"eval_result_metrics": {
|
35 |
+
"llm": {
|
36 |
+
"llm_test_accuracy": 0.9292125,
|
37 |
+
"llm_top_1_test_accuracy": 0.6425625,
|
38 |
+
"llm_top_2_test_accuracy": 0.7349125000000001,
|
39 |
+
"llm_top_5_test_accuracy": 0.8187187499999999,
|
40 |
+
"llm_top_10_test_accuracy": 0.8505625000000001,
|
41 |
+
"llm_top_20_test_accuracy": 0.8723249999999999,
|
42 |
+
"llm_top_50_test_accuracy": 0.9006,
|
43 |
+
"llm_top_100_test_accuracy": null
|
44 |
+
},
|
45 |
+
"sae": {
|
46 |
+
"sae_test_accuracy": 0.9335937898606061,
|
47 |
+
"sae_top_1_test_accuracy": 0.7714125000000001,
|
48 |
+
"sae_top_2_test_accuracy": 0.8151625,
|
49 |
+
"sae_top_5_test_accuracy": 0.85250625,
|
50 |
+
"sae_top_10_test_accuracy": 0.874375,
|
51 |
+
"sae_top_20_test_accuracy": 0.89373125,
|
52 |
+
"sae_top_50_test_accuracy": 0.90988125,
|
53 |
+
"sae_top_100_test_accuracy": null
|
54 |
+
}
|
55 |
+
},
|
56 |
+
"eval_result_details": [
|
57 |
+
{
|
58 |
+
"dataset_name": "LabHC/bias_in_bios_class_set1_results",
|
59 |
+
"llm_test_accuracy": 0.9512,
|
60 |
+
"llm_top_1_test_accuracy": 0.6045999999999999,
|
61 |
+
"llm_top_2_test_accuracy": 0.74,
|
62 |
+
"llm_top_5_test_accuracy": 0.7994,
|
63 |
+
"llm_top_10_test_accuracy": 0.8628,
|
64 |
+
"llm_top_20_test_accuracy": 0.8812000000000001,
|
65 |
+
"llm_top_50_test_accuracy": 0.921,
|
66 |
+
"llm_top_100_test_accuracy": null,
|
67 |
+
"sae_test_accuracy": 0.9536000490188599,
|
68 |
+
"sae_top_1_test_accuracy": 0.8096,
|
69 |
+
"sae_top_2_test_accuracy": 0.8427999999999999,
|
70 |
+
"sae_top_5_test_accuracy": 0.8566,
|
71 |
+
"sae_top_10_test_accuracy": 0.8737999999999999,
|
72 |
+
"sae_top_20_test_accuracy": 0.9044000000000001,
|
73 |
+
"sae_top_50_test_accuracy": 0.9218000000000002,
|
74 |
+
"sae_top_100_test_accuracy": null
|
75 |
+
},
|
76 |
+
{
|
77 |
+
"dataset_name": "LabHC/bias_in_bios_class_set2_results",
|
78 |
+
"llm_test_accuracy": 0.9308,
|
79 |
+
"llm_top_1_test_accuracy": 0.5818,
|
80 |
+
"llm_top_2_test_accuracy": 0.7078,
|
81 |
+
"llm_top_5_test_accuracy": 0.8074,
|
82 |
+
"llm_top_10_test_accuracy": 0.8343999999999999,
|
83 |
+
"llm_top_20_test_accuracy": 0.8556000000000001,
|
84 |
+
"llm_top_50_test_accuracy": 0.8896000000000001,
|
85 |
+
"llm_top_100_test_accuracy": null,
|
86 |
+
"sae_test_accuracy": 0.936400043964386,
|
87 |
+
"sae_top_1_test_accuracy": 0.743,
|
88 |
+
"sae_top_2_test_accuracy": 0.7906000000000001,
|
89 |
+
"sae_top_5_test_accuracy": 0.8132000000000001,
|
90 |
+
"sae_top_10_test_accuracy": 0.8488000000000001,
|
91 |
+
"sae_top_20_test_accuracy": 0.8812,
|
92 |
+
"sae_top_50_test_accuracy": 0.9054,
|
93 |
+
"sae_top_100_test_accuracy": null
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"dataset_name": "LabHC/bias_in_bios_class_set3_results",
|
97 |
+
"llm_test_accuracy": 0.9016,
|
98 |
+
"llm_top_1_test_accuracy": 0.6472,
|
99 |
+
"llm_top_2_test_accuracy": 0.7143999999999999,
|
100 |
+
"llm_top_5_test_accuracy": 0.7936,
|
101 |
+
"llm_top_10_test_accuracy": 0.8148,
|
102 |
+
"llm_top_20_test_accuracy": 0.8454,
|
103 |
+
"llm_top_50_test_accuracy": 0.8714000000000001,
|
104 |
+
"llm_top_100_test_accuracy": null,
|
105 |
+
"sae_test_accuracy": 0.9140000343322754,
|
106 |
+
"sae_top_1_test_accuracy": 0.7289999999999999,
|
107 |
+
"sae_top_2_test_accuracy": 0.7516,
|
108 |
+
"sae_top_5_test_accuracy": 0.8046000000000001,
|
109 |
+
"sae_top_10_test_accuracy": 0.8390000000000001,
|
110 |
+
"sae_top_20_test_accuracy": 0.8624,
|
111 |
+
"sae_top_50_test_accuracy": 0.8836,
|
112 |
+
"sae_top_100_test_accuracy": null
|
113 |
+
},
|
114 |
+
{
|
115 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
|
116 |
+
"llm_test_accuracy": 0.8695999999999999,
|
117 |
+
"llm_top_1_test_accuracy": 0.617,
|
118 |
+
"llm_top_2_test_accuracy": 0.6884,
|
119 |
+
"llm_top_5_test_accuracy": 0.738,
|
120 |
+
"llm_top_10_test_accuracy": 0.7702,
|
121 |
+
"llm_top_20_test_accuracy": 0.7857999999999999,
|
122 |
+
"llm_top_50_test_accuracy": 0.826,
|
123 |
+
"llm_top_100_test_accuracy": null,
|
124 |
+
"sae_test_accuracy": 0.8742000460624695,
|
125 |
+
"sae_top_1_test_accuracy": 0.7120000000000001,
|
126 |
+
"sae_top_2_test_accuracy": 0.7549999999999999,
|
127 |
+
"sae_top_5_test_accuracy": 0.7844,
|
128 |
+
"sae_top_10_test_accuracy": 0.8102,
|
129 |
+
"sae_top_20_test_accuracy": 0.8172,
|
130 |
+
"sae_top_50_test_accuracy": 0.8358000000000001,
|
131 |
+
"sae_top_100_test_accuracy": null
|
132 |
+
},
|
133 |
+
{
|
134 |
+
"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
|
135 |
+
"llm_test_accuracy": 0.8915,
|
136 |
+
"llm_top_1_test_accuracy": 0.64,
|
137 |
+
"llm_top_2_test_accuracy": 0.717,
|
138 |
+
"llm_top_5_test_accuracy": 0.789,
|
139 |
+
"llm_top_10_test_accuracy": 0.787,
|
140 |
+
"llm_top_20_test_accuracy": 0.813,
|
141 |
+
"llm_top_50_test_accuracy": 0.856,
|
142 |
+
"llm_top_100_test_accuracy": null,
|
143 |
+
"sae_test_accuracy": 0.8925000429153442,
|
144 |
+
"sae_top_1_test_accuracy": 0.766,
|
145 |
+
"sae_top_2_test_accuracy": 0.765,
|
146 |
+
"sae_top_5_test_accuracy": 0.8,
|
147 |
+
"sae_top_10_test_accuracy": 0.797,
|
148 |
+
"sae_top_20_test_accuracy": 0.847,
|
149 |
+
"sae_top_50_test_accuracy": 0.8685,
|
150 |
+
"sae_top_100_test_accuracy": null
|
151 |
+
},
|
152 |
+
{
|
153 |
+
"dataset_name": "codeparrot/github-code_results",
|
154 |
+
"llm_test_accuracy": 0.9621999999999999,
|
155 |
+
"llm_top_1_test_accuracy": 0.6902,
|
156 |
+
"llm_top_2_test_accuracy": 0.7020000000000001,
|
157 |
+
"llm_top_5_test_accuracy": 0.7926,
|
158 |
+
"llm_top_10_test_accuracy": 0.8710000000000001,
|
159 |
+
"llm_top_20_test_accuracy": 0.9044000000000001,
|
160 |
+
"llm_top_50_test_accuracy": 0.9266,
|
161 |
+
"llm_top_100_test_accuracy": null,
|
162 |
+
"sae_test_accuracy": 0.963200044631958,
|
163 |
+
"sae_top_1_test_accuracy": 0.7384000000000001,
|
164 |
+
"sae_top_2_test_accuracy": 0.7976000000000001,
|
165 |
+
"sae_top_5_test_accuracy": 0.889,
|
166 |
+
"sae_top_10_test_accuracy": 0.9244,
|
167 |
+
"sae_top_20_test_accuracy": 0.9273999999999999,
|
168 |
+
"sae_top_50_test_accuracy": 0.9388,
|
169 |
+
"sae_top_100_test_accuracy": null
|
170 |
+
},
|
171 |
+
{
|
172 |
+
"dataset_name": "fancyzhx/ag_news_results",
|
173 |
+
"llm_test_accuracy": 0.927,
|
174 |
+
"llm_top_1_test_accuracy": 0.6825000000000001,
|
175 |
+
"llm_top_2_test_accuracy": 0.7164999999999999,
|
176 |
+
"llm_top_5_test_accuracy": 0.83575,
|
177 |
+
"llm_top_10_test_accuracy": 0.8674999999999999,
|
178 |
+
"llm_top_20_test_accuracy": 0.895,
|
179 |
+
"llm_top_50_test_accuracy": 0.9149999999999999,
|
180 |
+
"llm_top_100_test_accuracy": null,
|
181 |
+
"sae_test_accuracy": 0.9352500289678574,
|
182 |
+
"sae_top_1_test_accuracy": 0.7975000000000001,
|
183 |
+
"sae_top_2_test_accuracy": 0.8385,
|
184 |
+
"sae_top_5_test_accuracy": 0.87925,
|
185 |
+
"sae_top_10_test_accuracy": 0.903,
|
186 |
+
"sae_top_20_test_accuracy": 0.9112500000000001,
|
187 |
+
"sae_top_50_test_accuracy": 0.92575,
|
188 |
+
"sae_top_100_test_accuracy": null
|
189 |
+
},
|
190 |
+
{
|
191 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
192 |
+
"llm_test_accuracy": 0.9998000000000001,
|
193 |
+
"llm_top_1_test_accuracy": 0.6772,
|
194 |
+
"llm_top_2_test_accuracy": 0.8932,
|
195 |
+
"llm_top_5_test_accuracy": 0.994,
|
196 |
+
"llm_top_10_test_accuracy": 0.9968,
|
197 |
+
"llm_top_20_test_accuracy": 0.9982,
|
198 |
+
"llm_top_50_test_accuracy": 0.9992000000000001,
|
199 |
+
"llm_top_100_test_accuracy": null,
|
200 |
+
"sae_test_accuracy": 0.9996000289916992,
|
201 |
+
"sae_top_1_test_accuracy": 0.8757999999999999,
|
202 |
+
"sae_top_2_test_accuracy": 0.9802,
|
203 |
+
"sae_top_5_test_accuracy": 0.993,
|
204 |
+
"sae_top_10_test_accuracy": 0.9987999999999999,
|
205 |
+
"sae_top_20_test_accuracy": 0.999,
|
206 |
+
"sae_top_50_test_accuracy": 0.9994,
|
207 |
+
"sae_top_100_test_accuracy": null
|
208 |
+
}
|
209 |
+
],
|
210 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
211 |
+
"sae_lens_id": "custom_sae",
|
212 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_13",
|
213 |
+
"sae_lens_version": "5.3.0",
|
214 |
+
"sae_cfg_dict": {
|
215 |
+
"model_name": "pythia-160m-deduped",
|
216 |
+
"d_in": 768,
|
217 |
+
"d_sae": 4096,
|
218 |
+
"hook_layer": 8,
|
219 |
+
"hook_name": "blocks.8.hook_resid_post",
|
220 |
+
"context_size": null,
|
221 |
+
"hook_head_index": null,
|
222 |
+
"architecture": "gated",
|
223 |
+
"apply_b_dec_to_input": null,
|
224 |
+
"finetuning_scaling_factor": null,
|
225 |
+
"activation_fn_str": "",
|
226 |
+
"prepend_bos": true,
|
227 |
+
"normalize_activations": "none",
|
228 |
+
"dtype": "float32",
|
229 |
+
"device": "",
|
230 |
+
"dataset_path": "",
|
231 |
+
"dataset_trust_remote_code": true,
|
232 |
+
"seqpos_slice": [
|
233 |
+
null
|
234 |
+
],
|
235 |
+
"training_tokens": 499998720,
|
236 |
+
"sae_lens_training_version": null,
|
237 |
+
"neuronpedia_id": null
|
238 |
+
},
|
239 |
+
"eval_result_unstructured": null
|
240 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_14_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped", "k_values": [1, 2, 5, 10, 20, 50], "lower_vram_usage": false
  },
  "eval_id": "be3c44c6-733f-4b43-a3c2-a720d46ac84b",
  "datetime_epoch_millis": 1736494482149,
  "eval_result_metrics": {
    "llm": {"llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null},
    "sae": {"sae_test_accuracy": 0.9338437952101232, "sae_top_1_test_accuracy": 0.7462687499999999, "sae_top_2_test_accuracy": 0.8165437500000001, "sae_top_5_test_accuracy": 0.8548687499999998, "sae_top_10_test_accuracy": 0.8827812499999999, "sae_top_20_test_accuracy": 0.89785, "sae_top_50_test_accuracy": 0.91461875, "sae_top_100_test_accuracy": null}
  },
  "eval_result_details": [
    {"dataset_name": "LabHC/bias_in_bios_class_set1_results",
     "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9532000422477722, "sae_top_1_test_accuracy": 0.8023999999999999, "sae_top_2_test_accuracy": 0.8426, "sae_top_5_test_accuracy": 0.849, "sae_top_10_test_accuracy": 0.8854, "sae_top_20_test_accuracy": 0.9112, "sae_top_50_test_accuracy": 0.933, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set2_results",
     "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9338000535964965, "sae_top_1_test_accuracy": 0.7154, "sae_top_2_test_accuracy": 0.7487999999999999, "sae_top_5_test_accuracy": 0.8204, "sae_top_10_test_accuracy": 0.8497999999999999, "sae_top_20_test_accuracy": 0.8806, "sae_top_50_test_accuracy": 0.8995999999999998, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set3_results",
     "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9130000472068787, "sae_top_1_test_accuracy": 0.7476, "sae_top_2_test_accuracy": 0.7546, "sae_top_5_test_accuracy": 0.8001999999999999, "sae_top_10_test_accuracy": 0.8398, "sae_top_20_test_accuracy": 0.8542, "sae_top_50_test_accuracy": 0.8827999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
     "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8740000486373901, "sae_top_1_test_accuracy": 0.7182000000000001, "sae_top_2_test_accuracy": 0.7238, "sae_top_5_test_accuracy": 0.7904, "sae_top_10_test_accuracy": 0.8107999999999999, "sae_top_20_test_accuracy": 0.8244, "sae_top_50_test_accuracy": 0.8455999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
     "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8975000381469727, "sae_top_1_test_accuracy": 0.581, "sae_top_2_test_accuracy": 0.789, "sae_top_5_test_accuracy": 0.802, "sae_top_10_test_accuracy": 0.853, "sae_top_20_test_accuracy": 0.877, "sae_top_50_test_accuracy": 0.883, "sae_top_100_test_accuracy": null},
    {"dataset_name": "codeparrot/github-code_results",
     "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9626000404357911, "sae_top_1_test_accuracy": 0.6284000000000001, "sae_top_2_test_accuracy": 0.8518000000000001, "sae_top_5_test_accuracy": 0.891, "sae_top_10_test_accuracy": 0.9238, "sae_top_20_test_accuracy": 0.9236000000000001, "sae_top_50_test_accuracy": 0.9434000000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "fancyzhx/ag_news_results",
     "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9372500479221344, "sae_top_1_test_accuracy": 0.80575, "sae_top_2_test_accuracy": 0.83375, "sae_top_5_test_accuracy": 0.8877499999999999, "sae_top_10_test_accuracy": 0.90025, "sae_top_20_test_accuracy": 0.913, "sae_top_50_test_accuracy": 0.9307500000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "Helsinki-NLP/europarl_results",
     "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9994000434875489, "sae_top_1_test_accuracy": 0.9714, "sae_top_2_test_accuracy": 0.9880000000000001, "sae_top_5_test_accuracy": 0.9982, "sae_top_10_test_accuracy": 0.9994, "sae_top_20_test_accuracy": 0.9987999999999999, "sae_top_50_test_accuracy": 0.9987999999999999, "sae_top_100_test_accuracy": null}
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_14",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {"model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null},
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_15_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped", "k_values": [1, 2, 5, 10, 20, 50], "lower_vram_usage": false
  },
  "eval_id": "5bfbf620-92c0-402c-a12e-8324864105fa",
  "datetime_epoch_millis": 1736494957646,
  "eval_result_metrics": {
    "llm": {"llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null},
    "sae": {"sae_test_accuracy": 0.9335437893867493, "sae_top_1_test_accuracy": 0.7421125, "sae_top_2_test_accuracy": 0.7919187499999999, "sae_top_5_test_accuracy": 0.8534749999999999, "sae_top_10_test_accuracy": 0.8788, "sae_top_20_test_accuracy": 0.89895625, "sae_top_50_test_accuracy": 0.9125187499999999, "sae_top_100_test_accuracy": null}
  },
  "eval_result_details": [
    {"dataset_name": "LabHC/bias_in_bios_class_set1_results",
     "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9540000438690186, "sae_top_1_test_accuracy": 0.7496, "sae_top_2_test_accuracy": 0.7902000000000001, "sae_top_5_test_accuracy": 0.8708, "sae_top_10_test_accuracy": 0.8869999999999999, "sae_top_20_test_accuracy": 0.9091999999999999, "sae_top_50_test_accuracy": 0.9294, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set2_results",
     "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9312000393867492, "sae_top_1_test_accuracy": 0.6976, "sae_top_2_test_accuracy": 0.7502, "sae_top_5_test_accuracy": 0.8314, "sae_top_10_test_accuracy": 0.8408000000000001, "sae_top_20_test_accuracy": 0.884, "sae_top_50_test_accuracy": 0.9019999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set3_results",
     "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9122000455856323, "sae_top_1_test_accuracy": 0.727, "sae_top_2_test_accuracy": 0.7462000000000001, "sae_top_5_test_accuracy": 0.8116, "sae_top_10_test_accuracy": 0.8416, "sae_top_20_test_accuracy": 0.8577999999999999, "sae_top_50_test_accuracy": 0.881, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
     "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8702000498771667, "sae_top_1_test_accuracy": 0.7082, "sae_top_2_test_accuracy": 0.7182, "sae_top_5_test_accuracy": 0.7571999999999999, "sae_top_10_test_accuracy": 0.799, "sae_top_20_test_accuracy": 0.8256, "sae_top_50_test_accuracy": 0.8408000000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
     "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8965000212192535, "sae_top_1_test_accuracy": 0.582, "sae_top_2_test_accuracy": 0.695, "sae_top_5_test_accuracy": 0.762, "sae_top_10_test_accuracy": 0.851, "sae_top_20_test_accuracy": 0.874, "sae_top_50_test_accuracy": 0.879, "sae_top_100_test_accuracy": null},
    {"dataset_name": "codeparrot/github-code_results",
     "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9680000305175781, "sae_top_1_test_accuracy": 0.6971999999999999, "sae_top_2_test_accuracy": 0.8190000000000002, "sae_top_5_test_accuracy": 0.914, "sae_top_10_test_accuracy": 0.9179999999999999, "sae_top_20_test_accuracy": 0.9288000000000001, "sae_top_50_test_accuracy": 0.9436, "sae_top_100_test_accuracy": null},
    {"dataset_name": "fancyzhx/ag_news_results",
     "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9372500479221344, "sae_top_1_test_accuracy": 0.8165, "sae_top_2_test_accuracy": 0.83275, "sae_top_5_test_accuracy": 0.882, "sae_top_10_test_accuracy": 0.8939999999999999, "sae_top_20_test_accuracy": 0.9132500000000001, "sae_top_50_test_accuracy": 0.92475, "sae_top_100_test_accuracy": null},
    {"dataset_name": "Helsinki-NLP/europarl_results",
     "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9990000367164612, "sae_top_1_test_accuracy": 0.9587999999999999, "sae_top_2_test_accuracy": 0.9837999999999999, "sae_top_5_test_accuracy": 0.9987999999999999, "sae_top_10_test_accuracy": 0.999, "sae_top_20_test_accuracy": 0.999, "sae_top_50_test_accuracy": 0.9996, "sae_top_100_test_accuracy": null}
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_15",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {"model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null},
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_16_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped", "k_values": [1, 2, 5, 10, 20, 50], "lower_vram_usage": false
  },
  "eval_id": "93de3643-62c3-4e44-b5d6-7433413790e2",
  "datetime_epoch_millis": 1736495395956,
  "eval_result_metrics": {
    "llm": {"llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null},
    "sae": {"sae_test_accuracy": 0.9336000479757786, "sae_top_1_test_accuracy": 0.7501, "sae_top_2_test_accuracy": 0.7865812500000001, "sae_top_5_test_accuracy": 0.8416875, "sae_top_10_test_accuracy": 0.875775, "sae_top_20_test_accuracy": 0.8978187499999998, "sae_top_50_test_accuracy": 0.9126625, "sae_top_100_test_accuracy": null}
  },
  "eval_result_details": [
    {"dataset_name": "LabHC/bias_in_bios_class_set1_results",
     "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9510000467300415, "sae_top_1_test_accuracy": 0.7704000000000001, "sae_top_2_test_accuracy": 0.7806, "sae_top_5_test_accuracy": 0.8419999999999999, "sae_top_10_test_accuracy": 0.8912000000000001, "sae_top_20_test_accuracy": 0.917, "sae_top_50_test_accuracy": 0.932, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set2_results",
     "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9278000473976136, "sae_top_1_test_accuracy": 0.7372, "sae_top_2_test_accuracy": 0.772, "sae_top_5_test_accuracy": 0.8288, "sae_top_10_test_accuracy": 0.8526, "sae_top_20_test_accuracy": 0.8815999999999999, "sae_top_50_test_accuracy": 0.907, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set3_results",
     "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9112000465393066, "sae_top_1_test_accuracy": 0.735, "sae_top_2_test_accuracy": 0.7594000000000001, "sae_top_5_test_accuracy": 0.8226000000000001, "sae_top_10_test_accuracy": 0.8406, "sae_top_20_test_accuracy": 0.8615999999999999, "sae_top_50_test_accuracy": 0.8834, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
     "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8722000479698181, "sae_top_1_test_accuracy": 0.6854000000000001, "sae_top_2_test_accuracy": 0.724, "sae_top_5_test_accuracy": 0.7802, "sae_top_10_test_accuracy": 0.8024000000000001, "sae_top_20_test_accuracy": 0.8245999999999999, "sae_top_50_test_accuracy": 0.8379999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
     "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.901000052690506, "sae_top_1_test_accuracy": 0.63, "sae_top_2_test_accuracy": 0.655, "sae_top_5_test_accuracy": 0.707, "sae_top_10_test_accuracy": 0.815, "sae_top_20_test_accuracy": 0.851, "sae_top_50_test_accuracy": 0.871, "sae_top_100_test_accuracy": null},
    {"dataset_name": "codeparrot/github-code_results",
     "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9660000324249267, "sae_top_1_test_accuracy": 0.7046, "sae_top_2_test_accuracy": 0.7864, "sae_top_5_test_accuracy": 0.8884000000000001, "sae_top_10_test_accuracy": 0.9188000000000001, "sae_top_20_test_accuracy": 0.9368000000000001, "sae_top_50_test_accuracy": 0.9454, "sae_top_100_test_accuracy": null},
    {"dataset_name": "fancyzhx/ag_news_results",
     "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9410000443458557, "sae_top_1_test_accuracy": 0.809, "sae_top_2_test_accuracy": 0.8262499999999999, "sae_top_5_test_accuracy": 0.8714999999999999, "sae_top_10_test_accuracy": 0.887, "sae_top_20_test_accuracy": 0.9117500000000001, "sae_top_50_test_accuracy": 0.9255, "sae_top_100_test_accuracy": null},
    {"dataset_name": "Helsinki-NLP/europarl_results",
     "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9986000657081604, "sae_top_1_test_accuracy": 0.9292, "sae_top_2_test_accuracy": 0.9890000000000001, "sae_top_5_test_accuracy": 0.993, "sae_top_10_test_accuracy": 0.9985999999999999, "sae_top_20_test_accuracy": 0.9982, "sae_top_50_test_accuracy": 0.999, "sae_top_100_test_accuracy": null}
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_16",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {"model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null},
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAE_pythia-160m-deduped__0108_resid_post_layer_8_trainer_17_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped", "k_values": [1, 2, 5, 10, 20, 50], "lower_vram_usage": false
  },
  "eval_id": "4119b2c2-7924-4e35-a168-92c714107478",
  "datetime_epoch_millis": 1736495821845,
  "eval_result_metrics": {
    "llm": {"llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null},
    "sae": {"sae_test_accuracy": 0.9306125409901143, "sae_top_1_test_accuracy": 0.756175, "sae_top_2_test_accuracy": 0.78300625, "sae_top_5_test_accuracy": 0.83600625, "sae_top_10_test_accuracy": 0.86741875, "sae_top_20_test_accuracy": 0.8945437500000001, "sae_top_50_test_accuracy": 0.91255, "sae_top_100_test_accuracy": null}
  },
  "eval_result_details": [
    {"dataset_name": "LabHC/bias_in_bios_class_set1_results",
     "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9468000411987305, "sae_top_1_test_accuracy": 0.7807999999999999, "sae_top_2_test_accuracy": 0.7670000000000001, "sae_top_5_test_accuracy": 0.8390000000000001, "sae_top_10_test_accuracy": 0.9, "sae_top_20_test_accuracy": 0.9136, "sae_top_50_test_accuracy": 0.9384, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set2_results",
     "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9276000380516052, "sae_top_1_test_accuracy": 0.7468, "sae_top_2_test_accuracy": 0.7718, "sae_top_5_test_accuracy": 0.8165999999999999, "sae_top_10_test_accuracy": 0.865, "sae_top_20_test_accuracy": 0.8892, "sae_top_50_test_accuracy": 0.9087999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set3_results",
     "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9058000326156617, "sae_top_1_test_accuracy": 0.7236, "sae_top_2_test_accuracy": 0.759, "sae_top_5_test_accuracy": 0.8354000000000001, "sae_top_10_test_accuracy": 0.8430000000000002, "sae_top_20_test_accuracy": 0.8714000000000001, "sae_top_50_test_accuracy": 0.8838000000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
     "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8688000559806823, "sae_top_1_test_accuracy": 0.7022, "sae_top_2_test_accuracy": 0.7252000000000001, "sae_top_5_test_accuracy": 0.7604, "sae_top_10_test_accuracy": 0.7827999999999999, "sae_top_20_test_accuracy": 0.8134, "sae_top_50_test_accuracy": 0.8356, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
     "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8945000469684601, "sae_top_1_test_accuracy": 0.643, "sae_top_2_test_accuracy": 0.665, "sae_top_5_test_accuracy": 0.736, "sae_top_10_test_accuracy": 0.745, "sae_top_20_test_accuracy": 0.829, "sae_top_50_test_accuracy": 0.868, "sae_top_100_test_accuracy": null},
    {"dataset_name": "codeparrot/github-code_results",
     "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9610000491142273, "sae_top_1_test_accuracy": 0.7008, "sae_top_2_test_accuracy": 0.781, "sae_top_5_test_accuracy": 0.8629999999999999, "sae_top_10_test_accuracy": 0.915, "sae_top_20_test_accuracy": 0.9328, "sae_top_50_test_accuracy": 0.9461999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "fancyzhx/ag_news_results",
     "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9410000443458557, "sae_top_1_test_accuracy": 0.767, "sae_top_2_test_accuracy": 0.8062499999999999, "sae_top_5_test_accuracy": 0.84425, "sae_top_10_test_accuracy": 0.89075, "sae_top_20_test_accuracy": 0.90875, "sae_top_50_test_accuracy": 0.921, "sae_top_100_test_accuracy": null},
    {"dataset_name": "Helsinki-NLP/europarl_results",
     "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9994000196456909, "sae_top_1_test_accuracy": 0.9852000000000001, "sae_top_2_test_accuracy": 0.9888, "sae_top_5_test_accuracy": 0.9934000000000001, "sae_top_10_test_accuracy": 0.9978, "sae_top_20_test_accuracy": 0.9982, "sae_top_50_test_accuracy": 0.9985999999999999, "sae_top_100_test_accuracy": null}
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_GatedSAETrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_17",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {"model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "gated", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null},
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_30_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped", "k_values": [1, 2, 5, 10, 20, 50], "lower_vram_usage": false
  },
  "eval_id": "9564d683-42ec-4dab-8b81-61d54d2fc1e8",
  "datetime_epoch_millis": 1736496246045,
  "eval_result_metrics": {
    "llm": {"llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null},
    "sae": {"sae_test_accuracy": 0.9272125441581011, "sae_top_1_test_accuracy": 0.7294437500000001, "sae_top_2_test_accuracy": 0.76516875, "sae_top_5_test_accuracy": 0.8277375, "sae_top_10_test_accuracy": 0.86786875, "sae_top_20_test_accuracy": 0.8947749999999999, "sae_top_50_test_accuracy": 0.9133812499999999, "sae_top_100_test_accuracy": null}
  },
  "eval_result_details": [
    {"dataset_name": "LabHC/bias_in_bios_class_set1_results",
     "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9420000553131104, "sae_top_1_test_accuracy": 0.7216000000000001, "sae_top_2_test_accuracy": 0.7220000000000001, "sae_top_5_test_accuracy": 0.8423999999999999, "sae_top_10_test_accuracy": 0.8901999999999999, "sae_top_20_test_accuracy": 0.9236000000000001, "sae_top_50_test_accuracy": 0.9410000000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set2_results",
     "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9216000318527222, "sae_top_1_test_accuracy": 0.7470000000000001, "sae_top_2_test_accuracy": 0.7502, "sae_top_5_test_accuracy": 0.8126, "sae_top_10_test_accuracy": 0.857, "sae_top_20_test_accuracy": 0.8932, "sae_top_50_test_accuracy": 0.9118, "sae_top_100_test_accuracy": null},
    {"dataset_name": "LabHC/bias_in_bios_class_set3_results",
     "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9062000632286071, "sae_top_1_test_accuracy": 0.6664000000000001, "sae_top_2_test_accuracy": 0.7468, "sae_top_5_test_accuracy": 0.7864, "sae_top_10_test_accuracy": 0.8400000000000001, "sae_top_20_test_accuracy": 0.8597999999999999, "sae_top_50_test_accuracy": 0.8842000000000001, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
     "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8660000443458558, "sae_top_1_test_accuracy": 0.7255999999999999, "sae_top_2_test_accuracy": 0.7594000000000001, "sae_top_5_test_accuracy": 0.7798, "sae_top_10_test_accuracy": 0.8109999999999999, "sae_top_20_test_accuracy": 0.8219999999999998, "sae_top_50_test_accuracy": 0.8305999999999999, "sae_top_100_test_accuracy": null},
    {"dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
     "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.8850000500679016, "sae_top_1_test_accuracy": 0.58, "sae_top_2_test_accuracy": 0.579, "sae_top_5_test_accuracy": 0.654, "sae_top_10_test_accuracy": 0.746, "sae_top_20_test_accuracy": 0.827, "sae_top_50_test_accuracy": 0.865, "sae_top_100_test_accuracy": null},
    {"dataset_name": "codeparrot/github-code_results",
     "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9618000388145447, "sae_top_1_test_accuracy": 0.6971999999999999, "sae_top_2_test_accuracy": 0.7744, "sae_top_5_test_accuracy": 0.8894, "sae_top_10_test_accuracy": 0.9154, "sae_top_20_test_accuracy": 0.9269999999999999, "sae_top_50_test_accuracy": 0.9516, "sae_top_100_test_accuracy": null},
    {"dataset_name": "fancyzhx/ag_news_results",
     "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
     "sae_test_accuracy": 0.9355000406503677, "sae_top_1_test_accuracy": 0.73575, "sae_top_2_test_accuracy": 0.80775, "sae_top_5_test_accuracy": 0.8634999999999999,
|
185 |
+
"sae_top_10_test_accuracy": 0.88475,
|
186 |
+
"sae_top_20_test_accuracy": 0.9069999999999999,
|
187 |
+
"sae_top_50_test_accuracy": 0.92425,
|
188 |
+
"sae_top_100_test_accuracy": null
|
189 |
+
},
|
190 |
+
{
|
191 |
+
"dataset_name": "Helsinki-NLP/europarl_results",
|
192 |
+
"llm_test_accuracy": 0.9998000000000001,
|
193 |
+
"llm_top_1_test_accuracy": 0.6772,
|
194 |
+
"llm_top_2_test_accuracy": 0.8932,
|
195 |
+
"llm_top_5_test_accuracy": 0.994,
|
196 |
+
"llm_top_10_test_accuracy": 0.9968,
|
197 |
+
"llm_top_20_test_accuracy": 0.9982,
|
198 |
+
"llm_top_50_test_accuracy": 0.9992000000000001,
|
199 |
+
"llm_top_100_test_accuracy": null,
|
200 |
+
"sae_test_accuracy": 0.9996000289916992,
|
201 |
+
"sae_top_1_test_accuracy": 0.962,
|
202 |
+
"sae_top_2_test_accuracy": 0.9818,
|
203 |
+
"sae_top_5_test_accuracy": 0.9937999999999999,
|
204 |
+
"sae_top_10_test_accuracy": 0.9985999999999999,
|
205 |
+
"sae_top_20_test_accuracy": 0.9986,
|
206 |
+
"sae_top_50_test_accuracy": 0.9986,
|
207 |
+
"sae_top_100_test_accuracy": null
|
208 |
+
}
|
209 |
+
],
|
210 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
211 |
+
"sae_lens_id": "custom_sae",
|
212 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_30",
|
213 |
+
"sae_lens_version": "5.3.0",
|
214 |
+
"sae_cfg_dict": {
|
215 |
+
"model_name": "pythia-160m-deduped",
|
216 |
+
"d_in": 768,
|
217 |
+
"d_sae": 4096,
|
218 |
+
"hook_layer": 8,
|
219 |
+
"hook_name": "blocks.8.hook_resid_post",
|
220 |
+
"context_size": null,
|
221 |
+
"hook_head_index": null,
|
222 |
+
"architecture": "jumprelu",
|
223 |
+
"apply_b_dec_to_input": null,
|
224 |
+
"finetuning_scaling_factor": null,
|
225 |
+
"activation_fn_str": "",
|
226 |
+
"prepend_bos": true,
|
227 |
+
"normalize_activations": "none",
|
228 |
+
"dtype": "float32",
|
229 |
+
"device": "",
|
230 |
+
"dataset_path": "",
|
231 |
+
"dataset_trust_remote_code": true,
|
232 |
+
"seqpos_slice": [
|
233 |
+
null
|
234 |
+
],
|
235 |
+
"training_tokens": 499998720,
|
236 |
+
"sae_lens_training_version": null,
|
237 |
+
"neuronpedia_id": null
|
238 |
+
},
|
239 |
+
"eval_result_unstructured": null
|
240 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_31_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [1, 2, 5, 10, 20, 50],
    "lower_vram_usage": false
  },
  "eval_id": "d7a93d82-f099-45cf-b22d-7e459c41347a",
  "datetime_epoch_millis": 1736496658438,
  "eval_result_metrics": {
    "llm": { "llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null },
    "sae": { "sae_test_accuracy": 0.9325812891125679, "sae_top_1_test_accuracy": 0.74750625, "sae_top_2_test_accuracy": 0.7931125, "sae_top_5_test_accuracy": 0.8508625, "sae_top_10_test_accuracy": 0.87788125, "sae_top_20_test_accuracy": 0.90069375, "sae_top_50_test_accuracy": 0.9127, "sae_top_100_test_accuracy": null }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.953600037097931, "sae_top_1_test_accuracy": 0.7956, "sae_top_2_test_accuracy": 0.7924, "sae_top_5_test_accuracy": 0.8492, "sae_top_10_test_accuracy": 0.8959999999999999, "sae_top_20_test_accuracy": 0.9113999999999999, "sae_top_50_test_accuracy": 0.9362, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9204000353813171, "sae_top_1_test_accuracy": 0.6724, "sae_top_2_test_accuracy": 0.7828, "sae_top_5_test_accuracy": 0.82, "sae_top_10_test_accuracy": 0.8470000000000001, "sae_top_20_test_accuracy": 0.8948, "sae_top_50_test_accuracy": 0.9062000000000001, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9130000472068787, "sae_top_1_test_accuracy": 0.6962, "sae_top_2_test_accuracy": 0.7486, "sae_top_5_test_accuracy": 0.807, "sae_top_10_test_accuracy": 0.8193999999999999, "sae_top_20_test_accuracy": 0.8632, "sae_top_50_test_accuracy": 0.8838000000000001, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8698000431060791, "sae_top_1_test_accuracy": 0.729, "sae_top_2_test_accuracy": 0.7412000000000001, "sae_top_5_test_accuracy": 0.772, "sae_top_10_test_accuracy": 0.8099999999999999, "sae_top_20_test_accuracy": 0.821, "sae_top_50_test_accuracy": 0.8335999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8995000422000885, "sae_top_1_test_accuracy": 0.577, "sae_top_2_test_accuracy": 0.659, "sae_top_5_test_accuracy": 0.822, "sae_top_10_test_accuracy": 0.837, "sae_top_20_test_accuracy": 0.872, "sae_top_50_test_accuracy": 0.8725, "sae_top_100_test_accuracy": null },
    { "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9650000333786011, "sae_top_1_test_accuracy": 0.7659999999999999, "sae_top_2_test_accuracy": 0.8286, "sae_top_5_test_accuracy": 0.8968, "sae_top_10_test_accuracy": 0.9193999999999999, "sae_top_20_test_accuracy": 0.9339999999999999, "sae_top_50_test_accuracy": 0.9490000000000001, "sae_top_100_test_accuracy": null },
    { "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9397500455379486, "sae_top_1_test_accuracy": 0.78225, "sae_top_2_test_accuracy": 0.8045, "sae_top_5_test_accuracy": 0.8485, "sae_top_10_test_accuracy": 0.89625, "sae_top_20_test_accuracy": 0.90975, "sae_top_50_test_accuracy": 0.9215, "sae_top_100_test_accuracy": null },
    { "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9996000289916992, "sae_top_1_test_accuracy": 0.9616, "sae_top_2_test_accuracy": 0.9878, "sae_top_5_test_accuracy": 0.9914, "sae_top_10_test_accuracy": 0.998, "sae_top_20_test_accuracy": 0.9994, "sae_top_50_test_accuracy": 0.9987999999999999, "sae_top_100_test_accuracy": null }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_31",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": { "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "jumprelu", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null },
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_32_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [1, 2, 5, 10, 20, 50],
    "lower_vram_usage": false
  },
  "eval_id": "3e3c022a-5caf-4adb-8cb7-4a1a81a42251",
  "datetime_epoch_millis": 1736497155533,
  "eval_result_metrics": {
    "llm": { "llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null },
    "sae": { "sae_test_accuracy": 0.932387548685074, "sae_top_1_test_accuracy": 0.7535250000000001, "sae_top_2_test_accuracy": 0.8026249999999999, "sae_top_5_test_accuracy": 0.8604937500000001, "sae_top_10_test_accuracy": 0.8833500000000001, "sae_top_20_test_accuracy": 0.8980812499999999, "sae_top_50_test_accuracy": 0.9146374999999999, "sae_top_100_test_accuracy": null }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9534000515937805, "sae_top_1_test_accuracy": 0.7672000000000001, "sae_top_2_test_accuracy": 0.834, "sae_top_5_test_accuracy": 0.8638, "sae_top_10_test_accuracy": 0.8978000000000002, "sae_top_20_test_accuracy": 0.9119999999999999, "sae_top_50_test_accuracy": 0.9272, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9256000518798828, "sae_top_1_test_accuracy": 0.7288, "sae_top_2_test_accuracy": 0.7487999999999999, "sae_top_5_test_accuracy": 0.8202, "sae_top_10_test_accuracy": 0.8492000000000001, "sae_top_20_test_accuracy": 0.8804000000000001, "sae_top_50_test_accuracy": 0.9103999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.915600037574768, "sae_top_1_test_accuracy": 0.7505999999999999, "sae_top_2_test_accuracy": 0.7678, "sae_top_5_test_accuracy": 0.8126, "sae_top_10_test_accuracy": 0.8400000000000001, "sae_top_20_test_accuracy": 0.8598000000000001, "sae_top_50_test_accuracy": 0.8846, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8762000560760498, "sae_top_1_test_accuracy": 0.71, "sae_top_2_test_accuracy": 0.7348, "sae_top_5_test_accuracy": 0.7746000000000001, "sae_top_10_test_accuracy": 0.804, "sae_top_20_test_accuracy": 0.8135999999999999, "sae_top_50_test_accuracy": 0.8332, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8930000364780426, "sae_top_1_test_accuracy": 0.575, "sae_top_2_test_accuracy": 0.688, "sae_top_5_test_accuracy": 0.829, "sae_top_10_test_accuracy": 0.861, "sae_top_20_test_accuracy": 0.871, "sae_top_50_test_accuracy": 0.89, "sae_top_100_test_accuracy": null },
    { "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9606000423431397, "sae_top_1_test_accuracy": 0.7214, "sae_top_2_test_accuracy": 0.8263999999999999, "sae_top_5_test_accuracy": 0.9056, "sae_top_10_test_accuracy": 0.9213999999999999, "sae_top_20_test_accuracy": 0.9353999999999999, "sae_top_50_test_accuracy": 0.9471999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9355000555515289, "sae_top_1_test_accuracy": 0.81, "sae_top_2_test_accuracy": 0.8320000000000001, "sae_top_5_test_accuracy": 0.87975, "sae_top_10_test_accuracy": 0.8940000000000001, "sae_top_20_test_accuracy": 0.91325, "sae_top_50_test_accuracy": 0.9255, "sae_top_100_test_accuracy": null },
    { "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9992000579833984, "sae_top_1_test_accuracy": 0.9652, "sae_top_2_test_accuracy": 0.9892, "sae_top_5_test_accuracy": 0.9984, "sae_top_10_test_accuracy": 0.9994, "sae_top_20_test_accuracy": 0.9992000000000001, "sae_top_50_test_accuracy": 0.999, "sae_top_100_test_accuracy": null }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_32",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": { "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "jumprelu", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null },
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_33_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [1, 2, 5, 10, 20, 50],
    "lower_vram_usage": false
  },
  "eval_id": "7818834a-f48c-4e31-b139-47eab4b678cb",
  "datetime_epoch_millis": 1736497579345,
  "eval_result_metrics": {
    "llm": { "llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null },
    "sae": { "sae_test_accuracy": 0.933300044387579, "sae_top_1_test_accuracy": 0.7825000000000001, "sae_top_2_test_accuracy": 0.81015, "sae_top_5_test_accuracy": 0.8648812499999999, "sae_top_10_test_accuracy": 0.8834749999999999, "sae_top_20_test_accuracy": 0.89695, "sae_top_50_test_accuracy": 0.91175625, "sae_top_100_test_accuracy": null }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9562000513076783, "sae_top_1_test_accuracy": 0.804, "sae_top_2_test_accuracy": 0.8232000000000002, "sae_top_5_test_accuracy": 0.8516, "sae_top_10_test_accuracy": 0.8865999999999999, "sae_top_20_test_accuracy": 0.908, "sae_top_50_test_accuracy": 0.9241999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9320000529289245, "sae_top_1_test_accuracy": 0.7869999999999999, "sae_top_2_test_accuracy": 0.8061999999999999, "sae_top_5_test_accuracy": 0.8320000000000001, "sae_top_10_test_accuracy": 0.8535999999999999, "sae_top_20_test_accuracy": 0.8762000000000001, "sae_top_50_test_accuracy": 0.9019999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9116000533103943, "sae_top_1_test_accuracy": 0.7365999999999999, "sae_top_2_test_accuracy": 0.76, "sae_top_5_test_accuracy": 0.8156000000000001, "sae_top_10_test_accuracy": 0.8318, "sae_top_20_test_accuracy": 0.8594000000000002, "sae_top_50_test_accuracy": 0.884, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8690000414848328, "sae_top_1_test_accuracy": 0.7142000000000001, "sae_top_2_test_accuracy": 0.7202, "sae_top_5_test_accuracy": 0.7798, "sae_top_10_test_accuracy": 0.8076000000000001, "sae_top_20_test_accuracy": 0.8216000000000001, "sae_top_50_test_accuracy": 0.8378, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8960000276565552, "sae_top_1_test_accuracy": 0.761, "sae_top_2_test_accuracy": 0.759, "sae_top_5_test_accuracy": 0.845, "sae_top_10_test_accuracy": 0.858, "sae_top_20_test_accuracy": 0.87, "sae_top_50_test_accuracy": 0.875, "sae_top_100_test_accuracy": null },
    { "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9642000555992126, "sae_top_1_test_accuracy": 0.6834, "sae_top_2_test_accuracy": 0.791, "sae_top_5_test_accuracy": 0.9056000000000001, "sae_top_10_test_accuracy": 0.9245999999999999, "sae_top_20_test_accuracy": 0.9296, "sae_top_50_test_accuracy": 0.9436, "sae_top_100_test_accuracy": null },
    { "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9380000531673431, "sae_top_1_test_accuracy": 0.7909999999999999, "sae_top_2_test_accuracy": 0.8320000000000001, "sae_top_5_test_accuracy": 0.8912499999999999, "sae_top_10_test_accuracy": 0.9069999999999999, "sae_top_20_test_accuracy": 0.9119999999999999, "sae_top_50_test_accuracy": 0.92825, "sae_top_100_test_accuracy": null },
    { "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9994000196456909, "sae_top_1_test_accuracy": 0.9827999999999999, "sae_top_2_test_accuracy": 0.9895999999999999, "sae_top_5_test_accuracy": 0.9982, "sae_top_10_test_accuracy": 0.9985999999999999, "sae_top_20_test_accuracy": 0.9987999999999999, "sae_top_50_test_accuracy": 0.9992000000000001, "sae_top_100_test_accuracy": null }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_33",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": { "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "jumprelu", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null },
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_34_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [1, 2, 5, 10, 20, 50],
    "lower_vram_usage": false
  },
  "eval_id": "19da201f-3513-4186-82ef-3e5f2f09db92",
  "datetime_epoch_millis": 1736498032855,
  "eval_result_metrics": {
    "llm": { "llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null },
    "sae": { "sae_test_accuracy": 0.932325041666627, "sae_top_1_test_accuracy": 0.774875, "sae_top_2_test_accuracy": 0.8070187499999999, "sae_top_5_test_accuracy": 0.8574124999999999, "sae_top_10_test_accuracy": 0.8764812499999999, "sae_top_20_test_accuracy": 0.8937812499999999, "sae_top_50_test_accuracy": 0.90968125, "sae_top_100_test_accuracy": null }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9506000399589538, "sae_top_1_test_accuracy": 0.8484, "sae_top_2_test_accuracy": 0.8459999999999999, "sae_top_5_test_accuracy": 0.8564, "sae_top_10_test_accuracy": 0.877, "sae_top_20_test_accuracy": 0.9064, "sae_top_50_test_accuracy": 0.9260000000000002, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9330000400543212, "sae_top_1_test_accuracy": 0.7594000000000001, "sae_top_2_test_accuracy": 0.806, "sae_top_5_test_accuracy": 0.8300000000000001, "sae_top_10_test_accuracy": 0.8398, "sae_top_20_test_accuracy": 0.8734, "sae_top_50_test_accuracy": 0.8947999999999998, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9140000343322754, "sae_top_1_test_accuracy": 0.7372, "sae_top_2_test_accuracy": 0.7769999999999999, "sae_top_5_test_accuracy": 0.8148, "sae_top_10_test_accuracy": 0.8475999999999999, "sae_top_20_test_accuracy": 0.8614, "sae_top_50_test_accuracy": 0.8777999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8750000476837159, "sae_top_1_test_accuracy": 0.7239999999999999, "sae_top_2_test_accuracy": 0.743, "sae_top_5_test_accuracy": 0.776, "sae_top_10_test_accuracy": 0.795, "sae_top_20_test_accuracy": 0.8116, "sae_top_50_test_accuracy": 0.8428000000000001, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.893000066280365, "sae_top_1_test_accuracy": 0.707, "sae_top_2_test_accuracy": 0.718, "sae_top_5_test_accuracy": 0.829, "sae_top_10_test_accuracy": 0.839, "sae_top_20_test_accuracy": 0.867, "sae_top_50_test_accuracy": 0.877, "sae_top_100_test_accuracy": null },
    { "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9606000423431397, "sae_top_1_test_accuracy": 0.7001999999999999, "sae_top_2_test_accuracy": 0.7838, "sae_top_5_test_accuracy": 0.8720000000000001, "sae_top_10_test_accuracy": 0.9139999999999999, "sae_top_20_test_accuracy": 0.9182, "sae_top_50_test_accuracy": 0.9416, "sae_top_100_test_accuracy": null },
    { "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9330000430345535, "sae_top_1_test_accuracy": 0.838, "sae_top_2_test_accuracy": 0.8447499999999999, "sae_top_5_test_accuracy": 0.8855, "sae_top_10_test_accuracy": 0.90125, "sae_top_20_test_accuracy": 0.91325, "sae_top_50_test_accuracy": 0.91925, "sae_top_100_test_accuracy": null },
    { "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968, "llm_top_20_test_accuracy": 0.9982, "llm_top_50_test_accuracy": 0.9992000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9994000196456909, "sae_top_1_test_accuracy": 0.8847999999999999, "sae_top_2_test_accuracy": 0.9376000000000001, "sae_top_5_test_accuracy": 0.9955999999999999, "sae_top_10_test_accuracy": 0.9982, "sae_top_20_test_accuracy": 0.999, "sae_top_50_test_accuracy": 0.9982, "sae_top_100_test_accuracy": null }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_34",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": { "model_name": "pythia-160m-deduped", "d_in": 768, "d_sae": 4096, "hook_layer": 8, "hook_name": "blocks.8.hook_resid_post", "context_size": null, "hook_head_index": null, "architecture": "jumprelu", "apply_b_dec_to_input": null, "finetuning_scaling_factor": null, "activation_fn_str": "", "prepend_bos": true, "normalize_activations": "none", "dtype": "float32", "device": "", "dataset_path": "", "dataset_trust_remote_code": true, "seqpos_slice": [null], "training_tokens": 499998720, "sae_lens_training_version": null, "neuronpedia_id": null },
  "eval_result_unstructured": null
}
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpRelu_pythia-160m-deduped__0108_resid_post_layer_8_trainer_35_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": ["LabHC/bias_in_bios_class_set1", "LabHC/bias_in_bios_class_set2", "LabHC/bias_in_bios_class_set3", "canrager/amazon_reviews_mcauley_1and5", "canrager/amazon_reviews_mcauley_1and5_sentiment", "codeparrot/github-code", "fancyzhx/ag_news", "Helsinki-NLP/europarl"],
    "probe_train_set_size": 4000, "probe_test_set_size": 1000, "context_length": 128,
    "sae_batch_size": 125, "llm_batch_size": 256, "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [1, 2, 5, 10, 20, 50],
    "lower_vram_usage": false
  },
  "eval_id": "c1692962-44d2-4408-a8f6-d4dfd38e9b69",
  "datetime_epoch_millis": 1736498502648,
  "eval_result_metrics": {
    "llm": { "llm_test_accuracy": 0.9292125, "llm_top_1_test_accuracy": 0.6425625, "llm_top_2_test_accuracy": 0.7349125000000001, "llm_top_5_test_accuracy": 0.8187187499999999, "llm_top_10_test_accuracy": 0.8505625000000001, "llm_top_20_test_accuracy": 0.8723249999999999, "llm_top_50_test_accuracy": 0.9006, "llm_top_100_test_accuracy": null },
    "sae": { "sae_test_accuracy": 0.9271812852472068, "sae_top_1_test_accuracy": 0.74820625, "sae_top_2_test_accuracy": 0.79564375, "sae_top_5_test_accuracy": 0.845225, "sae_top_10_test_accuracy": 0.8697062500000001, "sae_top_20_test_accuracy": 0.8823749999999999, "sae_top_50_test_accuracy": 0.9043000000000001, "sae_top_100_test_accuracy": null }
  },
  "eval_result_details": [
    { "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512, "llm_top_1_test_accuracy": 0.6045999999999999, "llm_top_2_test_accuracy": 0.74, "llm_top_5_test_accuracy": 0.7994, "llm_top_10_test_accuracy": 0.8628, "llm_top_20_test_accuracy": 0.8812000000000001, "llm_top_50_test_accuracy": 0.921, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9500000476837158, "sae_top_1_test_accuracy": 0.7906000000000001, "sae_top_2_test_accuracy": 0.8103999999999999, "sae_top_5_test_accuracy": 0.8542, "sae_top_10_test_accuracy": 0.8835999999999998, "sae_top_20_test_accuracy": 0.8921999999999999, "sae_top_50_test_accuracy": 0.9309999999999998, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308, "llm_top_1_test_accuracy": 0.5818, "llm_top_2_test_accuracy": 0.7078, "llm_top_5_test_accuracy": 0.8074, "llm_top_10_test_accuracy": 0.8343999999999999, "llm_top_20_test_accuracy": 0.8556000000000001, "llm_top_50_test_accuracy": 0.8896000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9300000429153442, "sae_top_1_test_accuracy": 0.7129999999999999, "sae_top_2_test_accuracy": 0.7684, "sae_top_5_test_accuracy": 0.8106000000000002, "sae_top_10_test_accuracy": 0.8334000000000001, "sae_top_20_test_accuracy": 0.8526, "sae_top_50_test_accuracy": 0.8868, "sae_top_100_test_accuracy": null },
    { "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016, "llm_top_1_test_accuracy": 0.6472, "llm_top_2_test_accuracy": 0.7143999999999999, "llm_top_5_test_accuracy": 0.7936, "llm_top_10_test_accuracy": 0.8148, "llm_top_20_test_accuracy": 0.8454, "llm_top_50_test_accuracy": 0.8714000000000001, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9034000396728515, "sae_top_1_test_accuracy": 0.7508, "sae_top_2_test_accuracy": 0.764, "sae_top_5_test_accuracy": 0.7946000000000001, "sae_top_10_test_accuracy": 0.8328, "sae_top_20_test_accuracy": 0.8535999999999999, "sae_top_50_test_accuracy": 0.8757999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999, "llm_top_1_test_accuracy": 0.617, "llm_top_2_test_accuracy": 0.6884, "llm_top_5_test_accuracy": 0.738, "llm_top_10_test_accuracy": 0.7702, "llm_top_20_test_accuracy": 0.7857999999999999, "llm_top_50_test_accuracy": 0.826, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8604000329971313, "sae_top_1_test_accuracy": 0.647, "sae_top_2_test_accuracy": 0.6744, "sae_top_5_test_accuracy": 0.756, "sae_top_10_test_accuracy": 0.7826000000000001, "sae_top_20_test_accuracy": 0.798, "sae_top_50_test_accuracy": 0.8295999999999999, "sae_top_100_test_accuracy": null },
    { "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915, "llm_top_1_test_accuracy": 0.64, "llm_top_2_test_accuracy": 0.717, "llm_top_5_test_accuracy": 0.789, "llm_top_10_test_accuracy": 0.787, "llm_top_20_test_accuracy": 0.813, "llm_top_50_test_accuracy": 0.856, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8930000364780426, "sae_top_1_test_accuracy": 0.766, "sae_top_2_test_accuracy": 0.787, "sae_top_5_test_accuracy": 0.814, "sae_top_10_test_accuracy": 0.828, "sae_top_20_test_accuracy": 0.8485, "sae_top_50_test_accuracy": 0.86, "sae_top_100_test_accuracy": null },
    { "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999, "llm_top_1_test_accuracy": 0.6902, "llm_top_2_test_accuracy": 0.7020000000000001, "llm_top_5_test_accuracy": 0.7926, "llm_top_10_test_accuracy": 0.8710000000000001, "llm_top_20_test_accuracy": 0.9044000000000001, "llm_top_50_test_accuracy": 0.9266, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9488000392913818, "sae_top_1_test_accuracy": 0.6799999999999999, "sae_top_2_test_accuracy": 0.7558, "sae_top_5_test_accuracy": 0.8436, "sae_top_10_test_accuracy": 0.9007999999999999, "sae_top_20_test_accuracy": 0.9112, "sae_top_50_test_accuracy": 0.9316000000000001, "sae_top_100_test_accuracy": null },
    { "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927, "llm_top_1_test_accuracy": 0.6825000000000001, "llm_top_2_test_accuracy": 0.7164999999999999, "llm_top_5_test_accuracy": 0.83575, "llm_top_10_test_accuracy": 0.8674999999999999, "llm_top_20_test_accuracy": 0.895, "llm_top_50_test_accuracy": 0.9149999999999999, "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9322500377893448, "sae_top_1_test_accuracy": 0.78725, "sae_top_2_test_accuracy": 0.84875, "sae_top_5_test_accuracy": 0.893, "sae_top_10_test_accuracy": 0.89925, "sae_top_20_test_accuracy": 0.9045, "sae_top_50_test_accuracy": 0.921, "sae_top_100_test_accuracy": null },
    { "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001, "llm_top_1_test_accuracy": 0.6772, "llm_top_2_test_accuracy": 0.8932, "llm_top_5_test_accuracy": 0.994, "llm_top_10_test_accuracy": 0.9968,
|
197 |
+
"llm_top_20_test_accuracy": 0.9982,
|
198 |
+
"llm_top_50_test_accuracy": 0.9992000000000001,
|
199 |
+
"llm_top_100_test_accuracy": null,
|
200 |
+
"sae_test_accuracy": 0.9996000051498413,
|
201 |
+
"sae_top_1_test_accuracy": 0.851,
|
202 |
+
"sae_top_2_test_accuracy": 0.9564,
|
203 |
+
"sae_top_5_test_accuracy": 0.9958,
|
204 |
+
"sae_top_10_test_accuracy": 0.9972,
|
205 |
+
"sae_top_20_test_accuracy": 0.9984,
|
206 |
+
"sae_top_50_test_accuracy": 0.9986,
|
207 |
+
"sae_top_100_test_accuracy": null
|
208 |
+
}
|
209 |
+
],
|
210 |
+
"sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
|
211 |
+
"sae_lens_id": "custom_sae",
|
212 |
+
"sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_JumpReluTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_35",
|
213 |
+
"sae_lens_version": "5.3.0",
|
214 |
+
"sae_cfg_dict": {
|
215 |
+
"model_name": "pythia-160m-deduped",
|
216 |
+
"d_in": 768,
|
217 |
+
"d_sae": 4096,
|
218 |
+
"hook_layer": 8,
|
219 |
+
"hook_name": "blocks.8.hook_resid_post",
|
220 |
+
"context_size": null,
|
221 |
+
"hook_head_index": null,
|
222 |
+
"architecture": "jumprelu",
|
223 |
+
"apply_b_dec_to_input": null,
|
224 |
+
"finetuning_scaling_factor": null,
|
225 |
+
"activation_fn_str": "",
|
226 |
+
"prepend_bos": true,
|
227 |
+
"normalize_activations": "none",
|
228 |
+
"dtype": "float32",
|
229 |
+
"device": "",
|
230 |
+
"dataset_path": "",
|
231 |
+
"dataset_trust_remote_code": true,
|
232 |
+
"seqpos_slice": [
|
233 |
+
null
|
234 |
+
],
|
235 |
+
"training_tokens": 499998720,
|
236 |
+
"sae_lens_training_version": null,
|
237 |
+
"neuronpedia_id": null
|
238 |
+
},
|
239 |
+
"eval_result_unstructured": null
|
240 |
+
}
|
sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/saebench_pythia-160m-deduped_width-2pow12_date-0108_PAnneal_pythia-160m-deduped__0108_resid_post_layer_8_trainer_0_eval_results.json
ADDED
@@ -0,0 +1,240 @@
{
  "eval_type_id": "sparse_probing",
  "eval_config": {
    "random_seed": 42,
    "dataset_names": [
      "LabHC/bias_in_bios_class_set1",
      "LabHC/bias_in_bios_class_set2",
      "LabHC/bias_in_bios_class_set3",
      "canrager/amazon_reviews_mcauley_1and5",
      "canrager/amazon_reviews_mcauley_1and5_sentiment",
      "codeparrot/github-code",
      "fancyzhx/ag_news",
      "Helsinki-NLP/europarl"
    ],
    "probe_train_set_size": 4000,
    "probe_test_set_size": 1000,
    "context_length": 128,
    "sae_batch_size": 125,
    "llm_batch_size": 256,
    "llm_dtype": "float32",
    "model_name": "pythia-160m-deduped",
    "k_values": [
      1,
      2,
      5,
      10,
      20,
      50
    ],
    "lower_vram_usage": false
  },
  "eval_id": "6b2a4856-435b-4403-9d20-a8d91e9f5138",
  "datetime_epoch_millis": 1736498922747,
  "eval_result_metrics": {
    "llm": {
      "llm_test_accuracy": 0.9292125,
      "llm_top_1_test_accuracy": 0.6425625,
      "llm_top_2_test_accuracy": 0.7349125000000001,
      "llm_top_5_test_accuracy": 0.8187187499999999,
      "llm_top_10_test_accuracy": 0.8505625000000001,
      "llm_top_20_test_accuracy": 0.8723249999999999,
      "llm_top_50_test_accuracy": 0.9006,
      "llm_top_100_test_accuracy": null
    },
    "sae": {
      "sae_test_accuracy": 0.932987543195486,
      "sae_top_1_test_accuracy": 0.776125,
      "sae_top_2_test_accuracy": 0.81555,
      "sae_top_5_test_accuracy": 0.8550875000000001,
      "sae_top_10_test_accuracy": 0.8778812500000001,
      "sae_top_20_test_accuracy": 0.89925,
      "sae_top_50_test_accuracy": 0.9111624999999999,
      "sae_top_100_test_accuracy": null
    }
  },
  "eval_result_details": [
    {
      "dataset_name": "LabHC/bias_in_bios_class_set1_results",
      "llm_test_accuracy": 0.9512,
      "llm_top_1_test_accuracy": 0.6045999999999999,
      "llm_top_2_test_accuracy": 0.74,
      "llm_top_5_test_accuracy": 0.7994,
      "llm_top_10_test_accuracy": 0.8628,
      "llm_top_20_test_accuracy": 0.8812000000000001,
      "llm_top_50_test_accuracy": 0.921,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9538000464439392,
      "sae_top_1_test_accuracy": 0.786,
      "sae_top_2_test_accuracy": 0.8291999999999999,
      "sae_top_5_test_accuracy": 0.8476000000000001,
      "sae_top_10_test_accuracy": 0.8718,
      "sae_top_20_test_accuracy": 0.9113999999999999,
      "sae_top_50_test_accuracy": 0.9306000000000001,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set2_results",
      "llm_test_accuracy": 0.9308,
      "llm_top_1_test_accuracy": 0.5818,
      "llm_top_2_test_accuracy": 0.7078,
      "llm_top_5_test_accuracy": 0.8074,
      "llm_top_10_test_accuracy": 0.8343999999999999,
      "llm_top_20_test_accuracy": 0.8556000000000001,
      "llm_top_50_test_accuracy": 0.8896000000000001,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9304000616073609,
      "sae_top_1_test_accuracy": 0.7988,
      "sae_top_2_test_accuracy": 0.8061999999999999,
      "sae_top_5_test_accuracy": 0.8257999999999999,
      "sae_top_10_test_accuracy": 0.845,
      "sae_top_20_test_accuracy": 0.8897999999999999,
      "sae_top_50_test_accuracy": 0.9032,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "LabHC/bias_in_bios_class_set3_results",
      "llm_test_accuracy": 0.9016,
      "llm_top_1_test_accuracy": 0.6472,
      "llm_top_2_test_accuracy": 0.7143999999999999,
      "llm_top_5_test_accuracy": 0.7936,
      "llm_top_10_test_accuracy": 0.8148,
      "llm_top_20_test_accuracy": 0.8454,
      "llm_top_50_test_accuracy": 0.8714000000000001,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9124000430107116,
      "sae_top_1_test_accuracy": 0.73,
      "sae_top_2_test_accuracy": 0.7806000000000001,
      "sae_top_5_test_accuracy": 0.817,
      "sae_top_10_test_accuracy": 0.8379999999999999,
      "sae_top_20_test_accuracy": 0.8614,
      "sae_top_50_test_accuracy": 0.8793999999999998,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_results",
      "llm_test_accuracy": 0.8695999999999999,
      "llm_top_1_test_accuracy": 0.617,
      "llm_top_2_test_accuracy": 0.6884,
      "llm_top_5_test_accuracy": 0.738,
      "llm_top_10_test_accuracy": 0.7702,
      "llm_top_20_test_accuracy": 0.7857999999999999,
      "llm_top_50_test_accuracy": 0.826,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8740000247955322,
      "sae_top_1_test_accuracy": 0.6948000000000001,
      "sae_top_2_test_accuracy": 0.7420000000000001,
      "sae_top_5_test_accuracy": 0.7647999999999999,
      "sae_top_10_test_accuracy": 0.8118000000000001,
      "sae_top_20_test_accuracy": 0.8272,
      "sae_top_50_test_accuracy": 0.841,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "canrager/amazon_reviews_mcauley_1and5_sentiment_results",
      "llm_test_accuracy": 0.8915,
      "llm_top_1_test_accuracy": 0.64,
      "llm_top_2_test_accuracy": 0.717,
      "llm_top_5_test_accuracy": 0.789,
      "llm_top_10_test_accuracy": 0.787,
      "llm_top_20_test_accuracy": 0.813,
      "llm_top_50_test_accuracy": 0.856,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.8950000405311584,
      "sae_top_1_test_accuracy": 0.728,
      "sae_top_2_test_accuracy": 0.784,
      "sae_top_5_test_accuracy": 0.803,
      "sae_top_10_test_accuracy": 0.832,
      "sae_top_20_test_accuracy": 0.862,
      "sae_top_50_test_accuracy": 0.875,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "codeparrot/github-code_results",
      "llm_test_accuracy": 0.9621999999999999,
      "llm_top_1_test_accuracy": 0.6902,
      "llm_top_2_test_accuracy": 0.7020000000000001,
      "llm_top_5_test_accuracy": 0.7926,
      "llm_top_10_test_accuracy": 0.8710000000000001,
      "llm_top_20_test_accuracy": 0.9044000000000001,
      "llm_top_50_test_accuracy": 0.9266,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.963200044631958,
      "sae_top_1_test_accuracy": 0.7196,
      "sae_top_2_test_accuracy": 0.8233999999999998,
      "sae_top_5_test_accuracy": 0.9046,
      "sae_top_10_test_accuracy": 0.9244,
      "sae_top_20_test_accuracy": 0.931,
      "sae_top_50_test_accuracy": 0.9433999999999999,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "fancyzhx/ag_news_results",
      "llm_test_accuracy": 0.927,
      "llm_top_1_test_accuracy": 0.6825000000000001,
      "llm_top_2_test_accuracy": 0.7164999999999999,
      "llm_top_5_test_accuracy": 0.83575,
      "llm_top_10_test_accuracy": 0.8674999999999999,
      "llm_top_20_test_accuracy": 0.895,
      "llm_top_50_test_accuracy": 0.9149999999999999,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9355000555515289,
      "sae_top_1_test_accuracy": 0.8230000000000001,
      "sae_top_2_test_accuracy": 0.833,
      "sae_top_5_test_accuracy": 0.8795,
      "sae_top_10_test_accuracy": 0.9012499999999999,
      "sae_top_20_test_accuracy": 0.9119999999999999,
      "sae_top_50_test_accuracy": 0.9175,
      "sae_top_100_test_accuracy": null
    },
    {
      "dataset_name": "Helsinki-NLP/europarl_results",
      "llm_test_accuracy": 0.9998000000000001,
      "llm_top_1_test_accuracy": 0.6772,
      "llm_top_2_test_accuracy": 0.8932,
      "llm_top_5_test_accuracy": 0.994,
      "llm_top_10_test_accuracy": 0.9968,
      "llm_top_20_test_accuracy": 0.9982,
      "llm_top_50_test_accuracy": 0.9992000000000001,
      "llm_top_100_test_accuracy": null,
      "sae_test_accuracy": 0.9996000289916992,
      "sae_top_1_test_accuracy": 0.9288000000000001,
      "sae_top_2_test_accuracy": 0.9259999999999999,
      "sae_top_5_test_accuracy": 0.9984000000000002,
      "sae_top_10_test_accuracy": 0.9987999999999999,
      "sae_top_20_test_accuracy": 0.9992000000000001,
      "sae_top_50_test_accuracy": 0.9992000000000001,
      "sae_top_100_test_accuracy": null
    }
  ],
  "sae_bench_commit_hash": "bca84cabc8cd60f8b15f37668faece7bbd9adc23",
  "sae_lens_id": "custom_sae",
  "sae_lens_release_id": "saebench_pythia-160m-deduped_width-2pow12_date-0108_PAnnealTrainer_EleutherAI_pythia-160m-deduped_ctx1024_0108_resid_post_layer_8_trainer_0",
  "sae_lens_version": "5.3.0",
  "sae_cfg_dict": {
    "model_name": "pythia-160m-deduped",
    "d_in": 768,
    "d_sae": 4096,
    "hook_layer": 8,
    "hook_name": "blocks.8.hook_resid_post",
    "context_size": null,
    "hook_head_index": null,
    "architecture": "p_anneal",
    "apply_b_dec_to_input": null,
    "finetuning_scaling_factor": null,
    "activation_fn_str": "",
    "prepend_bos": true,
    "normalize_activations": "none",
    "dtype": "float32",
    "device": "",
    "dataset_path": "",
    "dataset_trust_remote_code": true,
    "seqpos_slice": [
      null
    ],
    "training_tokens": 499998720,
    "sae_lens_training_version": null,
    "neuronpedia_id": null
  },
  "eval_result_unstructured": null
}
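Note: the eval_results.json files added above share the schema shown, so a minimal loader sketch may help when browsing this dataset. It is not part of the upload itself; the path below points at the PAnneal trainer_0 sparse_probing file from this commit, and any other *_eval_results.json file can be substituted.

import json
from pathlib import Path

# Path to one of the files added in this commit; substitute any *_eval_results.json file.
path = Path(
    "sparse_probing/saebench_pythia-160m-deduped_width-2pow12_date-0108/"
    "saebench_pythia-160m-deduped_width-2pow12_date-0108_PAnneal_pythia-160m-deduped"
    "__0108_resid_post_layer_8_trainer_0_eval_results.json"
)

with path.open() as f:
    result = json.load(f)

# Aggregate sparse-probing metrics: LLM residual-stream probes vs. SAE-feature probes.
llm = result["eval_result_metrics"]["llm"]
sae = result["eval_result_metrics"]["sae"]
print("LLM probe accuracy:", llm["llm_test_accuracy"])
print("SAE probe accuracy:", sae["sae_test_accuracy"])

# Per-dataset breakdown, e.g. top-1 SAE-feature probing accuracy.
for detail in result["eval_result_details"]:
    print(detail["dataset_name"], detail["sae_top_1_test_accuracy"])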