Add files using upload-large-folder tool
- crosscoder/layer_16_31_40_53_width_262k_l0_big/config.json +5 -4
- crosscoder/layer_16_31_40_53_width_262k_l0_medium/config.json +5 -4
- crosscoder/layer_16_31_40_53_width_524k_l0_big/config.json +5 -4
- crosscoder/layer_16_31_40_53_width_524k_l0_medium/config.json +5 -4
- crosscoder/layer_16_31_40_53_width_65k_l0_big/config.json +5 -4
- crosscoder/layer_16_31_40_53_width_65k_l0_medium/config.json +5 -4
crosscoder/layer_16_31_40_53_width_262k_l0_big/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 262144,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 150,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
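Every config.json in this commit follows the same schema; the hook points name the four Gemma 3 residual-stream outputs that the crosscoder reads from and reconstructs. Below is a minimal sketch of how a downstream script might load one of these configs and expand the brace pattern into per-layer module paths. The `expand_hook_points` helper, the local file path, and the expansion rule are illustrative assumptions, not part of the release.

```python
import json
import re
from pathlib import Path

def expand_hook_points(pattern: str) -> list[str]:
    """Expand 'model.layers.{16,31,40,53}.output' into one path per layer.

    Illustrative helper: the brace syntax is copied from the config values
    above; the expansion rule is an assumption about how consumers use it.
    """
    match = re.search(r"\{([^}]*)\}", pattern)
    if match is None:
        return [pattern]
    layers = match.group(1).split(",")
    return [pattern[:match.start()] + layer + pattern[match.end():] for layer in layers]

# Hypothetical local checkout of this repo folder.
cfg_path = Path("crosscoder/layer_16_31_40_53_width_262k_l0_big/config.json")
cfg = json.loads(cfg_path.read_text())

assert cfg["type"] == "crosscoder"
print(cfg["model_name"])        # google/gemma-3-27b-it
print(cfg["width"], cfg["l0"])  # 262144 150
print(expand_hook_points(cfg["hf_hook_point_in"]))
# ['model.layers.16.output', 'model.layers.31.output',
#  'model.layers.40.output', 'model.layers.53.output']
```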
crosscoder/layer_16_31_40_53_width_262k_l0_medium/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 262144,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 50,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
crosscoder/layer_16_31_40_53_width_524k_l0_big/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 524288,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 150,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
crosscoder/layer_16_31_40_53_width_524k_l0_medium/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 524288,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 50,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
crosscoder/layer_16_31_40_53_width_65k_l0_big/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 65536,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 150,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
crosscoder/layer_16_31_40_53_width_65k_l0_medium/config.json
CHANGED
@@ -1,9 +1,10 @@
 {
-"hf_hook_point_in": "model.layers.{16
-"hf_hook_point_out": "model.layers.{16
+"hf_hook_point_in": "model.layers.{16,31,40,53}.output",
+"hf_hook_point_out": "model.layers.{16,31,40,53}.output",
 "width": 65536,
-"model_name": "gemma-
+"model_name": "google/gemma-3-27b-it",
 "architecture": "jump_relu",
 "l0": 50,
-"affine_connection": false
+"affine_connection": false,
+"type": "crosscoder"
 }
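Taken together, the six directories enumerate the same layer set across three dictionary widths (65536, 262144, 524288) and two target L0 values (medium = 50, big = 150). A small, hypothetical helper for mapping a (width, l0) pair back to the matching folder name in this commit:

```python
# Hypothetical helper for picking one of the six released config directories.
# Width and L0 values are read off the diffs above; the naming scheme mirrors
# the folder names in this commit.
WIDTH_TAGS = {65_536: "65k", 262_144: "262k", 524_288: "524k"}
L0_TAGS = {50: "l0_medium", 150: "l0_big"}

def crosscoder_dir(width: int, l0: int) -> str:
    return f"crosscoder/layer_16_31_40_53_width_{WIDTH_TAGS[width]}_{L0_TAGS[l0]}"

print(crosscoder_dir(262_144, 150))
# crosscoder/layer_16_31_40_53_width_262k_l0_big
```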