{
    "bottleneck_dim": 24,
    "dataset_config_name": [
        "en"
    ],
    "delta_type": "adapter",
    "do_eval": true,
    "do_test": true,
    "do_train": true,
    "eval_dataset_config_name": [
        "en"
    ],
    "eval_dataset_name": "superglue-multirc",
    "eval_steps": 200,
    "evaluation_strategy": "steps",
    "greater_is_better": true,
    "learning_rate": 0.0003,
    "load_best_model_at_end": true,
    "max_source_length": 256,
    "metric_for_best_model": "eval_accuracy",
    "model_name_or_path": "roberta-base",
    "num_train_epochs": 3,
    "output_dir": "outputs/adapter/roberta-base/superglue-multirc",
    "overwrite_output_dir": true,
    "per_device_eval_batch_size": 32,
    "per_device_train_batch_size": 32,
    "predict_with_generate": true,
    "push_to_hub": true,
    "save_steps": 200,
    "save_strategy": "steps",
    "save_total_limit": 1,
    "seed": 42,
    "task_name": "superglue-multirc",
    "test_dataset_config_name": [
        "en"
    ],
    "test_dataset_name": "superglue-multirc",
    "tokenizer_name": "roberta-base",
    "unfrozen_modules": [
        "deltas",
        "layer_norm",
        "final_layer_norm",
        "classifier"
    ],
    "warmup_steps": 0
}