NewDisjointMultitask.Config
Component: NewDisjointMultitask
class NewDisjointMultitask.Config [source]

Bases: _NewTask.Config
All Attributes (including base classes)
- data: DisjointMultitaskData.Config = DisjointMultitaskData.Config()
- trainer: TaskTrainer.Config = TaskTrainer.Config()
- use_elastic: Optional[bool] =
None
- tasks: dict[str, NewTask.Config] =
{}
- task_weights: dict[str, float] =
{}
- target_task_name: Optional[str] =
None
- metric_reporter: DisjointMultitaskMetricReporter.Config = DisjointMultitaskMetricReporter.Config()
Default JSON
{
"data": {
"sampler": {
"RoundRobinBatchSampler": {
"iter_to_set_epoch": ""
}
},
"test_key": null
},
"trainer": {
"TaskTrainer": {
"epochs": 10,
"early_stop_after": 0,
"max_clip_norm": null,
"report_train_metrics": true,
"target_time_limit_seconds": null,
"do_eval": true,
"load_best_model_after_train": true,
"num_samples_to_log_progress": 1000,
"num_accumulated_batches": 1,
"num_batches_per_epoch": null,
"optimizer": {
"Adam": {
"lr": 0.001,
"weight_decay": 1e-05,
"eps": 1e-08
}
},
"scheduler": null,
"sparsifier": null,
"fp16_args": {
"FP16OptimizerFairseq": {
"init_loss_scale": 128,
"scale_window": null,
"scale_tolerance": 0.0,
"threshold_loss_scale": null,
"min_loss_scale": 0.0001
}
},
"privacy_engine": null,
"use_tensorboard": false
}
},
"use_elastic": null,
"tasks": {},
"task_weights": {},
"target_task_name": null,
"metric_reporter": {
"output_path": "/tmp/test_out.txt",
"pep_format": false,
"student_column_names": [],
"log_gradient": false,
"use_subtask_select_metric": false
}
}