diff options
author | Max Ryabinin <mryabinin0@gmail.com> | 2022-06-30 18:14:20 +0300 |
---|---|---|
committer | Max Ryabinin <mryabinin0@gmail.com> | 2022-06-30 18:14:20 +0300 |
commit | 33efe4a09f459832e8beceba70add0695cc485e4 (patch) | |
tree | 546ef2dea977f9850b4afeb9bfb18871ef948654 /bitsandbytes/optim/adamw.py | |
parent | 4e60e7dc62c50b6ba9b6becf6e779a1d48906be2 (diff) |
Remove unused imports, fix NotImplementedError
Diffstat (limited to 'bitsandbytes/optim/adamw.py')
-rw-r--r-- | bitsandbytes/optim/adamw.py | 2 |
1 file changed, 0 insertions, 2 deletions
diff --git a/bitsandbytes/optim/adamw.py b/bitsandbytes/optim/adamw.py
index 7761f3b..c4f0355 100644
--- a/bitsandbytes/optim/adamw.py
+++ b/bitsandbytes/optim/adamw.py
@@ -2,9 +2,7 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-import torch
 from bitsandbytes.optim.optimizer import Optimizer2State
-import bitsandbytes.functional as F

 class AdamW(Optimizer2State):
     def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,