Create config/bert-finetune.yml #1
opened by rammurmu

- config/bert-finetune.yml  +31 -0
config/bert-finetune.yml ADDED
@@ -0,0 +1,31 @@
+task: extractive-qa
+base_model: google-bert/bert-base-uncased
+project_name: autotrain-bert-finetune
+log: tensorboard
+backend: local
+
+data:
+  path: organic/sustain
+  train_split: train
+  valid_split: validation
+  column_mapping:
+    text_column: context
+    question_column: question
+    answer_column: answers
+
+params:
+  max_seq_length: 512
+  max_doc_stride: 128
+  epochs: 3
+  batch_size: 4
+  lr: 2e-5
+  optimizer: adamw_torch
+  scheduler: linear
+  gradient_accumulation: 1
+  mixed_precision: fp16
+
+hub:
+  username: ${HF_USERNAME}
+  token: ${HF_TOKEN}
+  push_to_hub: true
+
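This file follows the Hugging Face AutoTrain Advanced config schema for extractive question answering (task: extractive-qa). Assuming autotrain-advanced is installed and the organic/sustain dataset is reachable on the Hub, a local run could be launched roughly as sketched below; the credential values are placeholders you must supply yourself:

# Hub credentials referenced by ${HF_USERNAME} and ${HF_TOKEN} in the config
export HF_USERNAME=<your-hf-username>
export HF_TOKEN=<your-hf-write-token>

# install the CLI and start training from the config file
pip install autotrain-advanced
autotrain --config config/bert-finetune.yml

With push_to_hub: true, the finished model is pushed to the Hub under the configured username and project name once training completes.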