author     Max Ryabinin <mryabinin0@gmail.com>    2022-06-30 18:14:20 +0300
committer  Max Ryabinin <mryabinin0@gmail.com>    2022-06-30 18:14:20 +0300
commit     33efe4a09f459832e8beceba70add0695cc485e4 (patch)
tree       546ef2dea977f9850b4afeb9bfb18871ef948654 /bitsandbytes/optim/adagrad.py
parent     4e60e7dc62c50b6ba9b6becf6e779a1d48906be2 (diff)
Remove unused imports, fix NotImplementedError
Diffstat (limited to 'bitsandbytes/optim/adagrad.py')
-rw-r--r--  bitsandbytes/optim/adagrad.py | 3 ---
1 file changed, 0 insertions(+), 3 deletions(-)
diff --git a/bitsandbytes/optim/adagrad.py b/bitsandbytes/optim/adagrad.py
index 84ade3c..4f51250 100644
--- a/bitsandbytes/optim/adagrad.py
+++ b/bitsandbytes/optim/adagrad.py
@@ -2,11 +2,8 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-import torch
 from bitsandbytes.optim.optimizer import Optimizer1State
-torch.optim.Adagrad
-
 class Adagrad(Optimizer1State):
     def __init__(self, params, lr=1e-2, lr_decay=0, weight_decay=0, initial_accumulator_value=0, eps=1e-10,
                  optim_bits=32, args=None, min_8bit_size=4096, percentile_clipping=100, block_wise=True):
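For context, a minimal usage sketch of the class whose signature appears in the hunk above. This is not part of the commit: the bitsandbytes.optim import path, the toy model, and the training data are assumptions for illustration, and bitsandbytes optimizers typically expect a CUDA device at step time.

# Minimal sketch (assumptions: bitsandbytes is installed and exposes
# Adagrad via bitsandbytes.optim; a CUDA device is available; the toy
# model and data below are illustrative only).
import torch
import bitsandbytes as bnb

model = torch.nn.Linear(64, 2).cuda()

# Keyword arguments mirror the signature shown in the diff above;
# block_wise=True keeps block-wise quantization of optimizer state enabled.
optimizer = bnb.optim.Adagrad(
    model.parameters(), lr=1e-2, eps=1e-10,
    optim_bits=32, min_8bit_size=4096,
    percentile_clipping=100, block_wise=True,
)

x = torch.randn(8, 64, device="cuda")
y = torch.randint(0, 2, (8,), device="cuda")
loss = torch.nn.functional.cross_entropy(model(x), y)
loss.backward()
optimizer.step()       # applies the Adagrad update to the parameters
optimizer.zero_grad()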