{
  "auto_mapping": null,
  "base_model_name_or_path": "/c21071/lgc/llmpeft4apr/models/CodeLlama-13b-hf",
  "encoder_hidden_size": 256,
  "inference_mode": true,
  "num_attention_heads": 40,
  "num_layers": 40,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 100,
  "peft_type": "PREFIX_TUNING",
  "prefix_projection": true,
  "revision": null,
  "task_type": "CAUSAL_LM",
  "token_dim": 5120
}
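This is a PEFT `adapter_config.json` for prefix tuning on a causal LM: 100 virtual tokens are prepended to every layer's key/value states, and with "prefix_projection" enabled the prefix is reparameterized through a small MLP (hidden size 256) during training. The fields "token_dim": 5120, "num_layers": 40, and "num_attention_heads": 40 match the CodeLlama-13b base model and are derived from it rather than chosen by hand. Below is a minimal sketch, assuming the `peft` and `transformers` Python libraries, of how an equivalent adapter could be created and then reloaded for inference; the base-model path is the local one recorded above, while the adapter directory "./prefix-adapter" is hypothetical and stands for wherever this config and its weights were saved.

# A minimal sketch, assuming the `peft` and `transformers` libraries.
from transformers import AutoModelForCausalLM
from peft import PeftModel, PrefixTuningConfig, TaskType, get_peft_model

BASE = "/c21071/lgc/llmpeft4apr/models/CodeLlama-13b-hf"  # local path from the config

# Training side: build an equivalent config. Model-derived fields
# (token_dim, num_layers, num_attention_heads) are filled in automatically
# by get_peft_model from the base model's configuration.
peft_config = PrefixTuningConfig(
    task_type=TaskType.CAUSAL_LM,
    num_virtual_tokens=100,    # prefix length per layer
    prefix_projection=True,    # reparameterize the prefix through an MLP
    encoder_hidden_size=256,   # hidden size of that projection MLP
)
model = get_peft_model(AutoModelForCausalLM.from_pretrained(BASE), peft_config)
model.print_trainable_parameters()  # only the prefix encoder is trainable

# Inference side ("inference_mode": true): load the saved adapter on top
# of the frozen base model.
base = AutoModelForCausalLM.from_pretrained(BASE)
model = PeftModel.from_pretrained(base, "./prefix-adapter")  # hypothetical adapter dir

Note that saving the trained adapter with `model.save_pretrained(...)` is what produces a config like the one above, with "inference_mode" flipped to true so the prefix weights are loaded frozen.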