Add files using upload-large-folder tool
- mlp_out_all/layer_12_width_16k_l0_small/config.json +3 -2
- mlp_out_all/layer_17_width_16k_l0_big/config.json +3 -2
- mlp_out_all/layer_23_width_262k_l0_big/config.json +3 -2
- mlp_out_all/layer_32_width_16k_l0_big/config.json +3 -2
- mlp_out_all/layer_33_width_262k_l0_big/config.json +3 -2
- mlp_out_all/layer_33_width_262k_l0_small/config.json +3 -2
- mlp_out_all/layer_51_width_262k_l0_small/config.json +3 -2
- mlp_out_all/layer_52_width_16k_l0_big/config.json +3 -2
- mlp_out_all/layer_56_width_16k_l0_big/config.json +3 -2
- mlp_out_all/layer_61_width_16k_l0_small/config.json +3 -2
- mlp_out_all/layer_6_width_262k_l0_big/config.json +3 -2
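
Every diff below makes the same three-line edit to a config.json: the truncated "model_name" value is replaced with "google/gemma-3-27b-it", "affine_connection" gains a trailing comma, and a new "type": "sae" field is appended. A minimal Python sketch of a script that could apply this edit across the folder (hypothetical; the actual upload-large-folder tool is not shown in this commit):

import json
from pathlib import Path

# Apply the commit's edit to every SAE config under mlp_out_all/:
# fix model_name in place and append the new "type" field at the end.
for cfg_path in Path("mlp_out_all").glob("*/config.json"):
    cfg = json.loads(cfg_path.read_text())
    cfg["model_name"] = "google/gemma-3-27b-it"  # previously a truncated "gemma-" value
    cfg["type"] = "sae"                          # new field introduced by this commit
    cfg_path.write_text(json.dumps(cfg, indent=2) + "\n")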
mlp_out_all/layer_12_width_16k_l0_small/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.12.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 15,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_17_width_16k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 109,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_23_width_262k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.23.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.23.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_32_width_16k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.32.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.32.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_33_width_262k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.33.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.33.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_33_width_262k_l0_small/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.33.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.33.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_51_width_262k_l0_small/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.51.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.51.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_52_width_16k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.52.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.52.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_56_width_16k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.56.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.56.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_61_width_16k_l0_small/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.61.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.61.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }
mlp_out_all/layer_6_width_262k_l0_big/config.json
CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.6.post_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.6.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-
+  "model_name": "google/gemma-3-27b-it",
   "architecture": "jump_relu",
   "l0": 77,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "sae"
 }