From 33efe4a09f459832e8beceba70add0695cc485e4 Mon Sep 17 00:00:00 2001
From: Max Ryabinin
Date: Thu, 30 Jun 2022 18:14:20 +0300
Subject: Remove unused imports, fix NotImplementedError

---
 bitsandbytes/optim/adagrad.py | 3 ---
 1 file changed, 3 deletions(-)

(limited to 'bitsandbytes/optim/adagrad.py')

diff --git a/bitsandbytes/optim/adagrad.py b/bitsandbytes/optim/adagrad.py
index 84ade3c..4f51250 100644
--- a/bitsandbytes/optim/adagrad.py
+++ b/bitsandbytes/optim/adagrad.py
@@ -2,11 +2,8 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
-import torch
 
 from bitsandbytes.optim.optimizer import Optimizer1State
 
-torch.optim.Adagrad
-
 class Adagrad(Optimizer1State):
     def __init__(self, params, lr=1e-2, lr_decay=0, weight_decay=0, initial_accumulator_value=0, eps=1e-10, optim_bits=32, args=None, min_8bit_size=4096, percentile_clipping=100, block_wise=True):
--
cgit v1.2.3