From 76ece2c126b5255fe973615adf986c4331f521ff Mon Sep 17 00:00:00 2001
From: justheuristic
Date: Sun, 18 Sep 2022 00:43:56 +0300
Subject: rollback

---
 bitsandbytes/autograd/_functions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 55bedee..1d0002c 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -318,7 +318,7 @@ class MatMul8bitLt(torch.autograd.Function):
 
         # 4. Mixed-precision decomposition matmul
         if coo_tensorA is not None and subA is not None:
-            output.addmm_(output, subA, state.subB)
+            output += torch.matmul(subA, state.subB)
 
         # 5. Save state
         ctx.state = state
-- 
cgit v1.2.3
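
For context, below is a minimal sketch of the mixed-precision decomposition step this rollback restores: the higher-precision outlier slice of the input (`subA`) is multiplied with the matching weight rows (`subB`) and accumulated into the output of the int8 matmul. The tensor shapes and names here are illustrative assumptions, not the actual bitsandbytes tensors; only the final accumulation line mirrors the restored code.

```python
import torch

# Illustrative shapes (assumptions): a small batch, a handful of outlier
# columns, and the layer's output dimension.
batch, n_out, n_outliers = 4, 16, 2

# `output` stands in for the (already dequantized) result of the int8 matmul
# over the non-outlier dimensions; `subA` holds the outlier columns of the
# input kept in higher precision, `subB` the matching rows of the weight.
output = torch.zeros(batch, n_out)
subA = torch.randn(batch, n_outliers)
subB = torch.randn(n_outliers, n_out)

# The restored line: accumulate the outlier contribution into the output.
output += torch.matmul(subA, subB)

# An equivalent in-place form with a valid signature would be
# `output.addmm_(subA, subB)`, i.e. self = self + subA @ subB; the removed
# line passed `output` as an extra positional argument, which does not match
# Tensor.addmm_(mat1, mat2, *, beta=1, alpha=1), hence the rollback to an
# explicit matmul plus in-place add.
```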