diff options
author | Tim Dettmers <TimDettmers@users.noreply.github.com> | 2022-06-30 08:21:24 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-06-30 08:21:24 -0700 |
commit | 3418cd390e952a7752fb6b2544c25e25af7c0371 (patch) | |
tree | 546ef2dea977f9850b4afeb9bfb18871ef948654 /bitsandbytes/optim/adamw.py | |
parent | 4e60e7dc62c50b6ba9b6becf6e779a1d48906be2 (diff) | |
parent | 33efe4a09f459832e8beceba70add0695cc485e4 (diff) |
Merge pull request #2 from TimDettmers/fix_imports
Remove unused imports, fix NotImplementedError
Diffstat (limited to 'bitsandbytes/optim/adamw.py')
-rw-r--r-- | bitsandbytes/optim/adamw.py | 2 |
1 file changed, 0 insertions, 2 deletions
diff --git a/bitsandbytes/optim/adamw.py b/bitsandbytes/optim/adamw.py
index 7761f3b..c4f0355 100644
--- a/bitsandbytes/optim/adamw.py
+++ b/bitsandbytes/optim/adamw.py
@@ -2,9 +2,7 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-import torch
 from bitsandbytes.optim.optimizer import Optimizer2State
-import bitsandbytes.functional as F

 class AdamW(Optimizer2State):
     def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,