diff options
author | Titus von Koeller <titus@vonkoeller.com> | 2022-08-01 09:32:47 -0700 |
---|---|---|
committer | Titus von Koeller <titus@vonkoeller.com> | 2022-08-01 09:32:47 -0700 |
commit | ea7c14f8ef64924f2d0ff80df3cdabf2c7299848 (patch) | |
tree | 3b9ec443a259cf36d87627a8e2cc7d13513f6a21 /bitsandbytes/optim/adagrad.py | |
parent | 3fd06fb6206f46b6d18fbb8a512da63832dea98b (diff) |
reran black with linelength 80 for greater readability
Diffstat (limited to 'bitsandbytes/optim/adagrad.py')
-rw-r--r-- | bitsandbytes/optim/adagrad.py | 12 |
1 file changed, 9 insertions, 3 deletions
diff --git a/bitsandbytes/optim/adagrad.py b/bitsandbytes/optim/adagrad.py index 43e3973..7e2f566 100644 --- a/bitsandbytes/optim/adagrad.py +++ b/bitsandbytes/optim/adagrad.py @@ -23,7 +23,9 @@ class Adagrad(Optimizer1State): if not 0.0 <= lr: raise ValueError("Invalid learning rate: {}".format(lr)) if not 0.0 <= weight_decay: - raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + raise ValueError( + "Invalid weight_decay value: {}".format(weight_decay) + ) if not 0.0 <= eps: raise ValueError("Invalid epsilon value: {}".format(eps)) if initial_accumulator_value != 0.0: @@ -63,7 +65,9 @@ class Adagrad8bit(Optimizer1State): if not 0.0 <= lr: raise ValueError("Invalid learning rate: {}".format(lr)) if not 0.0 <= weight_decay: - raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + raise ValueError( + "Invalid weight_decay value: {}".format(weight_decay) + ) if not 0.0 <= eps: raise ValueError("Invalid epsilon value: {}".format(eps)) if initial_accumulator_value != 0.0: @@ -104,7 +108,9 @@ class Adagrad32bit(Optimizer1State): if not 0.0 <= lr: raise ValueError("Invalid learning rate: {}".format(lr)) if not 0.0 <= weight_decay: - raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + raise ValueError( + "Invalid weight_decay value: {}".format(weight_decay) + ) if not 0.0 <= eps: raise ValueError("Invalid epsilon value: {}".format(eps)) if initial_accumulator_value != 0.0: |