From 5d658171017473b54825dfeac21718f4e4be4eca Mon Sep 17 00:00:00 2001
From: justheuristic
Date: Sun, 18 Sep 2022 01:09:24 +0300
Subject: debug

---
 bitsandbytes/autograd/_functions.py | 2 --
 bitsandbytes/nn/modules.py          | 4 +++-
 2 files changed, 3 insertions(+), 3 deletions(-)

(limited to 'bitsandbytes')

diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 407f14b..9928fbd 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -370,8 +370,6 @@ class MatMul8bitLt(torch.autograd.Function):
             if state.threshold > 0.0 and subA is not None:
                 grad_B[:, idx] += torch.matmul(grad_output.t(), subA)
 
-        raise NotImplementedError("!!")
-
         if req_gradA:
             if state.CBt is not None:
                 C32grad, Sgrad = F.transform(Cgrad, "col32")
diff --git a/bitsandbytes/nn/modules.py b/bitsandbytes/nn/modules.py
index e7e759d..9250fec 100644
--- a/bitsandbytes/nn/modules.py
+++ b/bitsandbytes/nn/modules.py
@@ -237,7 +237,9 @@ class Linear8bitLt(nn.Linear):
         if threshold > 0.0 and not has_fp16_weights:
             self.state.use_pool = True
 
-        self.weight = Int8Params(self.weight.data, has_fp16_weights=has_fp16_weights)
+        self.weight = Int8Params(
+            self.weight.data, has_fp16_weights=has_fp16_weights, requires_grad=has_fp16_weights
+        )
 
     def init_8bit_state(self):
         self.state.CB = self.weight.CB
-- 
cgit v1.2.3
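
A minimal usage sketch of the Int8Params change above (not part of the patch; it assumes a working bitsandbytes install with this commit applied): a Linear8bitLt layer constructed without fp16 master weights should report a non-trainable weight, while one that keeps fp16 weights stays trainable.

    import bitsandbytes as bnb

    # Keeps fp16 master weights: the weight parameter remains trainable.
    fp16_layer = bnb.nn.Linear8bitLt(64, 64, has_fp16_weights=True, threshold=6.0)
    # Frozen int8 weights: requires_grad now follows has_fp16_weights.
    int8_layer = bnb.nn.Linear8bitLt(64, 64, has_fp16_weights=False, threshold=6.0)

    print(fp16_layer.weight.requires_grad)  # True
    print(int8_layer.weight.requires_grad)  # False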