| author | Tim Dettmers <dettmers@cs.washington.edu> | 2022-09-20 06:36:32 +0300 |
|---|---|---|
| committer | justheuristic <justheuristic@gmail.com> | 2022-09-20 06:36:32 +0300 |
| commit | 9b7d307b8cc9d88310fe0c0548e4a0fb094f45d3 (patch) | |
| tree | f8e84ec39f97fc357ef918a70fb127a883f834b0 /bitsandbytes/autograd | |
| parent | cff3a7159943369841675dbc1076e555ffb2260b (diff) | |
review
Diffstat (limited to 'bitsandbytes/autograd')
-rw-r--r-- | bitsandbytes/autograd/_functions.py | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
```diff
diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 9928fbd..2ddb406 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -381,7 +381,7 @@ class MatMul8bitLt(torch.autograd.Function):
             grad_A = F.mm_dequant(gradA32, SgradA32, SCgrad, state.SCBt).view(ctx.grad_shape).to(ctx.dtype_A)
         elif state.CB is not None:
-            CB = state.CB.to(ctx.dtype_A, copy=True).mul_(state.SCB.unsqueeze(1).div(127.0))
+            CB = state.CB.to(ctx.dtype_A, copy=True).mul_(state.SCB.unsqueeze(1).mul(1. / 127.0))
             grad_A = torch.matmul(grad_output, CB).view(ctx.grad_shape).to(ctx.dtype_A)
         else:
             raise Exception('State must contain either CBt or CB matrix for backward')
```
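For context: in this backward path, `state.CB` holds the int8 row-quantized weights and `state.SCB` the per-row absmax scales, so `CB * SCB / 127` recovers the weights in the activation dtype before computing `grad_A = grad_output @ CB`. The change replaces the elementwise division by 127 with a multiplication by its precomputed reciprocal, which is mathematically equivalent but avoids launching a division over the whole scale tensor. A minimal standalone sketch of the same arithmetic (tensor names, shapes, and values below are toy stand-ins, not bitsandbytes' actual state object):

```python
import torch

# Toy stand-ins for state.CB (int8 row-quantized weights) and state.SCB
# (per-row absmax scales); shapes and values are illustrative only.
CB_int8 = torch.randint(-127, 128, (4, 8), dtype=torch.int8)
SCB = torch.rand(4) * 3.0           # absmax of each original weight row
dtype_A = torch.float32             # dtype of the activations / grad_output

# Old form: dequantize with an elementwise division by 127.
CB_div = CB_int8.to(dtype_A).mul_(SCB.unsqueeze(1).div(127.0))

# New form: multiply by the constant reciprocal 1/127 instead of dividing.
CB_mul = CB_int8.to(dtype_A).mul_(SCB.unsqueeze(1).mul(1.0 / 127.0))

# The two agree up to floating-point rounding of the reciprocal.
assert torch.allclose(CB_div, CB_mul)
```

The two forms differ at most by rounding in the last bit of `1/127`, which is negligible next to the int8 quantization error itself, so the swap is a safe micro-optimization.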