Add Adamax, NAdam, RAdam optimizers (#2969)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Author: Glenn Jocher
Date: 2023-06-04 22:35:50 +02:00
Committer: GitHub
parent f502b50365
commit 451cf8b647
5 changed files with 144 additions and 134 deletions


@@ -14,7 +14,7 @@ except ImportError:
tune = None
default_space = {
-# 'optimizer': tune.choice(['SGD', 'Adam', 'AdamW', 'RMSProp']),
+# 'optimizer': tune.choice(['SGD', 'Adam', 'AdamW', 'NAdam', 'RAdam', 'RMSProp']),
'lr0': tune.uniform(1e-5, 1e-1),
'lrf': tune.uniform(0.01, 1.0), # final OneCycleLR learning rate (lr0 * lrf)
'momentum': tune.uniform(0.6, 0.98), # SGD momentum/Adam beta1
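
For context, the optimizer names listed in the search space map directly onto classes in torch.optim. The sketch below is an illustration only, not the Ultralytics implementation: build_optimizer and its default values are assumptions. It shows how a tuned optimizer string such as 'NAdam' or 'RAdam' could be turned into an optimizer instance, with the tuned 'momentum' value reused as beta1 for the Adam family, as the comment on the 'momentum' entry suggests.

# Minimal sketch, assuming PyTorch >= 1.10 (when NAdam and RAdam were added).
# build_optimizer is a hypothetical helper, not the Ultralytics API.
import torch
import torch.nn as nn


def build_optimizer(model: nn.Module, name: str = 'SGD', lr: float = 0.01, momentum: float = 0.937):
    """Map an optimizer name from the search space to a torch.optim instance."""
    params = model.parameters()
    if name == 'SGD':
        return torch.optim.SGD(params, lr=lr, momentum=momentum, nesterov=True)
    if name in ('Adam', 'AdamW', 'Adamax', 'NAdam', 'RAdam'):
        # the tuned 'momentum' value doubles as beta1 for the Adam family
        return getattr(torch.optim, name)(params, lr=lr, betas=(momentum, 0.999))
    if name == 'RMSProp':
        return torch.optim.RMSprop(params, lr=lr, momentum=momentum)
    raise ValueError(f'Unsupported optimizer: {name}')


# Example: instantiate one of the newly added optimizers
model = nn.Linear(10, 2)
optimizer = build_optimizer(model, name='RAdam', lr=1e-3)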