author    justheuristic <justheuristic@gmail.com>  2022-09-18 01:09:24 +0300
committer justheuristic <justheuristic@gmail.com>  2022-09-18 01:09:24 +0300
commit    5d658171017473b54825dfeac21718f4e4be4eca (patch)
tree      713911c2465b202500df596c5632de2b68865a7d
parent    4da2227fcbc3803d680dff113403aecac1827bc3 (diff)
debug
-rw-r--r--  bitsandbytes/autograd/_functions.py  2
-rw-r--r--  bitsandbytes/nn/modules.py           4
2 files changed, 3 insertions, 3 deletions
diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 407f14b..9928fbd 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -370,8 +370,6 @@ class MatMul8bitLt(torch.autograd.Function):
             if state.threshold > 0.0 and subA is not None:
                 grad_B[:, idx] += torch.matmul(grad_output.t(), subA)
-                raise NotImplementedError("!!")
-

         if req_gradA:
             if state.CBt is not None:
                 C32grad, Sgrad = F.transform(Cgrad, "col32")
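Note (not part of the commit): the surviving context lines are the outlier path of the int8 backward, where the feature columns of A that exceeded the threshold contribute an exact high-precision term to the weight gradient. A minimal standalone sketch of that accumulation, with made-up shapes and plain float32 on CPU so it runs anywhere:

import torch

# Hypothetical shapes for illustration only: 4 tokens, hidden size 8,
# output size 6, and 2 outlier feature columns.
grad_output = torch.randn(4, 6)   # dL/d(output)
grad_B = torch.zeros(6, 8)        # stand-in for the dequantized int8 matmul result
idx = torch.tensor([1, 5])        # indices of the outlier columns of A
subA = torch.randn(4, 2)          # high-precision slice of A at those columns

# Same accumulation as the context line in MatMul8bitLt.backward: the outlier
# columns of the weight gradient receive an exact (non-quantized) contribution.
grad_B[:, idx] += torch.matmul(grad_output.t(), subA)
print(grad_B[:, idx].shape)       # torch.Size([6, 2])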
diff --git a/bitsandbytes/nn/modules.py b/bitsandbytes/nn/modules.py
index e7e759d..9250fec 100644
--- a/bitsandbytes/nn/modules.py
+++ b/bitsandbytes/nn/modules.py
@@ -237,7 +237,9 @@ class Linear8bitLt(nn.Linear):
         if threshold > 0.0 and not has_fp16_weights:
             self.state.use_pool = True

-        self.weight = Int8Params(self.weight.data, has_fp16_weights=has_fp16_weights)
+        self.weight = Int8Params(
+            self.weight.data, has_fp16_weights=has_fp16_weights, requires_grad=has_fp16_weights
+        )

     def init_8bit_state(self):
         self.state.CB = self.weight.CB
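Note (not part of the commit): after this change, a Linear8bitLt constructed with has_fp16_weights=False gets an Int8Params weight whose requires_grad is also False, so frozen int8 layers no longer ask autograd for a weight gradient. A rough usage sketch, assuming a CUDA-enabled bitsandbytes install:

import bitsandbytes as bnb

# Frozen int8 layer: weights stay quantized and, with this commit,
# are created with requires_grad=False.
frozen = bnb.nn.Linear8bitLt(64, 64, has_fp16_weights=False, threshold=6.0)
print(frozen.weight.requires_grad)     # False (was True before this change)

# Trainable variant keeps fp16 weights and a trainable weight.
trainable = bnb.nn.Linear8bitLt(64, 64, has_fp16_weights=True, threshold=6.0)
print(trainable.weight.requires_grad)  # True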