{ "act_layer": "gelu", "depth": 12, "drop": 0.0, "drop_path": 0.1, "embedding_dim": 768, "num_heads": 12, "num_registers": 1, "num_tokens": 20, "patch_size": 16 }