author | Tim Dettmers <TimDettmers@users.noreply.github.com> | 2022-06-30 08:21:24 -0700
---|---|---
committer | GitHub <noreply@github.com> | 2022-06-30 08:21:24 -0700
commit | 3418cd390e952a7752fb6b2544c25e25af7c0371 (patch) |
tree | 546ef2dea977f9850b4afeb9bfb18871ef948654 /bitsandbytes/optim/adagrad.py |
parent | 4e60e7dc62c50b6ba9b6becf6e779a1d48906be2 (diff) |
parent | 33efe4a09f459832e8beceba70add0695cc485e4 (diff) |
Merge pull request #2 from TimDettmers/fix_imports
Remove unused imports, fix NotImplementedError
Diffstat (limited to 'bitsandbytes/optim/adagrad.py')
-rw-r--r-- | bitsandbytes/optim/adagrad.py | 3 |
1 file changed, 0 insertions, 3 deletions
diff --git a/bitsandbytes/optim/adagrad.py b/bitsandbytes/optim/adagrad.py
index 84ade3c..4f51250 100644
--- a/bitsandbytes/optim/adagrad.py
+++ b/bitsandbytes/optim/adagrad.py
@@ -2,11 +2,8 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-import torch
 from bitsandbytes.optim.optimizer import Optimizer1State
 
-torch.optim.Adagrad
-
 class Adagrad(Optimizer1State):
     def __init__(self, params, lr=1e-2, lr_decay=0, weight_decay=0, initial_accumulator_value=0, eps=1e-10,
             optim_bits=32, args=None, min_8bit_size=4096, percentile_clipping=100, block_wise=True):
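For context, a minimal usage sketch of the `Adagrad` class whose constructor appears in the diff above. This is not part of the commit: the export path `bnb.optim.Adagrad`, the placeholder model, and the choice of `optim_bits=8` are assumptions; parameter names and defaults are taken from the `__init__` signature shown, and a CUDA-capable GPU is assumed for the bitsandbytes optimizer kernels.

```python
# Hypothetical usage sketch of bitsandbytes' Adagrad (Optimizer1State subclass).
# Assumes the class is exported as bnb.optim.Adagrad and a CUDA GPU is available.
import torch
import bitsandbytes as bnb

model = torch.nn.Linear(1024, 1024).cuda()  # placeholder model

# optim_bits=8 requests 8-bit optimizer state; optim_bits=32 (the default in the
# signature above) keeps full-precision state. Other arguments mirror the diff.
optimizer = bnb.optim.Adagrad(
    model.parameters(),
    lr=1e-2,
    eps=1e-10,
    optim_bits=8,
    min_8bit_size=4096,
    percentile_clipping=100,
    block_wise=True,
)

# One illustrative training step.
out = model(torch.randn(16, 1024, device="cuda"))
out.sum().backward()
optimizer.step()
optimizer.zero_grad()
```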