| | | |
|---|---|---|
| author | Tim Dettmers <dettmers@g3030.hyak.local> | 2021-10-21 16:04:18 -0700 |
| committer | Tim Dettmers <dettmers@g3030.hyak.local> | 2021-10-21 16:04:18 -0700 |
| commit | 0c5fa5a6372b60bac4bdb953dfe8cc4d4e88bd61 (patch) | |
| tree | cb4a1be40a9cb601b8327d4677f3d55fbe6956af /bitsandbytes | |
| parent | 1ec0d54529a4b8fb79b60ba9149424a420cc7c98 (diff) | |
Fixed syntax and import error.
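
The syntax error is the `my-quantization-routine` branch in `AnalysisAdam`, whose body consisted only of comments; Python requires at least one statement in every branch, which the added `pass` supplies. The import error comes from the missing `import torch`, needed because the module references `torch.optim.Optimizer` when defining `AnalysisAdam`. A minimal, self-contained sketch of the syntax failure, using made-up names rather than the optimizer's real state:

```python
# Hypothetical illustration: a branch whose body is only comments is rejected
# by the parser ("IndentationError: expected an indented block"); a placeholder
# `pass` statement makes it valid.
analysis = 'my-quantization-routine'

if analysis == 'quantile':
    result = 'quantile path'
elif analysis == 'my-quantization-routine':
    pass  # keeps the otherwise comment-only branch syntactically valid
    # 1. get code
    # 2. quantize
    # 3. dequantize
```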
Diffstat (limited to 'bitsandbytes')
-rw-r--r-- | bitsandbytes/optim/adam.py | 4 |
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/bitsandbytes/optim/adam.py b/bitsandbytes/optim/adam.py
index c1f455f..eb951ee 100644
--- a/bitsandbytes/optim/adam.py
+++ b/bitsandbytes/optim/adam.py
@@ -2,6 +2,7 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
+import torch
 
 from bitsandbytes.optim.optimizer import Optimizer2State
 import bitsandbytes.functional as F
@@ -49,7 +50,7 @@ class AnalysisAdam(torch.optim.Optimizer):
         amsgrad (boolean, optional): whether to use the AMSGrad variant of this
             algorithm from the paper `On the Convergence of Adam and Beyond`_
 
-    .. _Adam\: A Method for Stochastic Optimization:
+    .. _Adam: A Method for Stochastic Optimization:
         https://arxiv.org/abs/1412.6980
     .. _On the Convergence of Adam and Beyond:
         https://openreview.net/forum?id=ryQu7f-RZ
@@ -192,6 +193,7 @@ class AnalysisAdam(torch.optim.Optimizer):
                 C2 = F.quantize_no_absmax(exp_avg_sq, code=code2)
                 state2 = F.dequantize_no_absmax(C2, code2)
             elif self.analysis == 'my-quantization-routine':
+                pass
                 # 1. get code
                 # 2. quantize
                 # 3. dequantize
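
For orientation, a hedged sketch of how the three commented steps in the `my-quantization-routine` stub could be filled in, mirroring the `quantize_no_absmax` / `dequantize_no_absmax` pattern visible in the surrounding branch. The random tensors, the `create_dynamic_map` codebook choice, and the CUDA device are illustrative assumptions, not part of this commit:

```python
import torch
import bitsandbytes.functional as F

# Illustrative stand-ins for the Adam state buffers (assumes a CUDA device).
exp_avg = torch.randn(1024, device='cuda') * 0.01
exp_avg_sq = torch.rand(1024, device='cuda') * 0.01

# 1. get code: build an 8-bit codebook (the dynamic map is one possible choice)
code = F.create_dynamic_map(signed=True).to('cuda')

# 2. quantize: map each value to the index of its nearest codebook entry
C1 = F.quantize_no_absmax(exp_avg, code=code)
C2 = F.quantize_no_absmax(exp_avg_sq, code=code)

# 3. dequantize: look the indices back up to recover approximate values
state1 = F.dequantize_no_absmax(C1, code)
state2 = F.dequantize_no_absmax(C2, code)

print((exp_avg - state1).abs().max())  # quantization error of the first moment
```

The `no_absmax` variants skip per-block absmax rescaling, so this sketch assumes the inputs already lie roughly within the codebook's [-1, 1] range.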