{ "hf_hook_point_in": "model.layers.9.output", "hf_hook_point_out": "model.layers.9.output", "width": 1048576, "model_name": "google/gemma-3-4b-pt", "architecture": "jump_relu", "l0": 17, "affine_connection": false, "type": "sae" }