summary | refs | log | tree | commit | diff
path: root/bitsandbytes/autograd/_functions.py
diff options
context:
space:
mode:
author: justheuristic <justheuristic@gmail.com> 2022-09-18 00:36:46 +0300
committer: justheuristic <justheuristic@gmail.com> 2022-09-18 00:36:46 +0300
commit: ab9dee062d791ef343ff5f9e8c2c85dc094219ed (patch)
tree: 88ce521928a5abd9ddd46a9f6de51e20deb14482 /bitsandbytes/autograd/_functions.py
parent: cbfdf0b5efe4923ba4533c274ce83072b7e502b5 (diff)
cast edge case
Diffstat (limited to 'bitsandbytes/autograd/_functions.py')
-rw-r--r-- bitsandbytes/autograd/_functions.py | 5
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index d0e48b7..1d0002c 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -221,9 +221,6 @@ class MatMul8bitLt(torch.autograd.Function):
# 3. Matmul
# 4. Mixed-precision decomposition matmul
# 5. Save state
- requires_gradA = A.requires_grad
- requires_gradB = B.requires_grad
- requires_gradBias = bias is not None and bias.requires_grad
formatB = state.formatB
input_shape = A.shape
if state.outlier_pool is None:
@@ -330,7 +327,7 @@ class MatMul8bitLt(torch.autograd.Function):
ctx.grad_shape = input_shape
ctx.dtype_A, ctx.dtype_B, ctx.dtype_bias = A.dtype, B.dtype, None if bias is None else bias.dtype
- if requires_gradA or requires_gradB:
+ if any(ctx.needs_input_grad[:2]):
ctx.tensors = (CAt, subA)
ctx.tensor_states = (SCAt, state.idx)
else: