deepfold.config.TrainingConfig

class deepfold.config.TrainingConfig(gradient_clipping: bool = True, clip_grad_max_nrom: float = 0.1, swa_enabled: bool = True, swa_decay_rate: float = 0.9)
__init__(gradient_clipping: bool = True, clip_grad_max_nrom: float = 0.1, swa_enabled: bool = True, swa_decay_rate: float = 0.9) -> None
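A minimal construction sketch, assuming the class is importable from deepfold.config as shown and that the constructor arguments are exposed as same-named attributes (the attribute list below suggests this); the printed value is illustrative, not captured output:

>>> from deepfold.config import TrainingConfig
>>> # Default settings: gradient clipping and stochastic weight averaging (SWA) enabled
>>> cfg = TrainingConfig()
>>> cfg.clip_grad_max_nrom
0.1
>>> # Override selected fields at construction time
>>> cfg = TrainingConfig(gradient_clipping=False, swa_decay_rate=0.99)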

Methods

__init__([gradient_clipping, ...])

from_dict(cfg)

from_preset(**additional_options)

to_dict()
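A hedged round-trip sketch of the serialization helpers; from_dict is assumed to be a classmethod taking the dict produced by to_dict, and from_preset is assumed to return a preset configuration with keyword overrides applied; neither behavior is confirmed by this page:

>>> cfg = TrainingConfig(swa_enabled=False)
>>> state = cfg.to_dict()                       # plain dict of configuration fields (assumed)
>>> restored = TrainingConfig.from_dict(state)  # rebuild an equivalent config (assumed classmethod)
>>> preset = TrainingConfig.from_preset(swa_decay_rate=0.95)  # preset defaults plus overrides (assumed)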

Attributes

clip_grad_max_nrom

gradient_clipping

optimizer_adam_amsgrad

optimizer_adam_beta_1

optimizer_adam_beta_2

optimizer_adam_eps

optimizer_adam_weight_decay

swa_decay_rate

swa_enabled
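
The optimizer_adam_* attributes are not constructor parameters, which suggests they are fixed class-level defaults for the Adam optimizer. A hedged sketch of wiring them into torch.optim.Adam (the use of PyTorch here, and the placeholder model, are assumptions not stated on this page):

>>> import torch
>>> cfg = TrainingConfig()
>>> optimizer = torch.optim.Adam(
...     model.parameters(),  # `model` is a placeholder torch.nn.Module
...     betas=(cfg.optimizer_adam_beta_1, cfg.optimizer_adam_beta_2),
...     eps=cfg.optimizer_adam_eps,
...     weight_decay=cfg.optimizer_adam_weight_decay,
...     amsgrad=cfg.optimizer_adam_amsgrad,
... )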