{
"hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
"hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
"width": 262144,
"model_name": "google/gemma-3-4b-pt",
"architecture": "jump_relu",
"l0": 60,
"affine_connection": false,
"type": "transcoder"
}