From a9fe0ff98c3293d972eb7a638b9887df0bc0d30d Mon Sep 17 00:00:00 2001
From: justheuristic
Date: Sat, 17 Sep 2022 23:34:22 +0300
Subject: recast to fp16

---
 bitsandbytes/autograd/_functions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index f4a6d57..dc79bb1 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -275,7 +275,7 @@ class MatMul8bitLt(torch.autograd.Function):
                     state.SCB,
                     state.SCBt,
                     coo_tensorB,
-                ) = F.double_quant(B)
+                ) = F.double_quant(B.to(torch.float16))
                 state.CxB, state.SB = F.transform(CB, to_order=formatB)
             else:
                 has_grad = False
--
cgit v1.2.3
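
Context for the one-line change: F.double_quant produces the int8 representation of B via absmax scaling and expects a float16 input, so if B arrives as float32 or bfloat16 (e.g. from a mixed-precision training setup) the recast B.to(torch.float16) keeps the quantization path well-defined. Below is a minimal standalone sketch, not the bitsandbytes implementation, that mimics only the row-wise half of that scheme; the helper name rowwise_absmax_int8 is hypothetical, and SCB here is just the analogue of the per-row scale stored in state.SCB.

    import torch

    def rowwise_absmax_int8(B: torch.Tensor):
        """Quantize each row of B to int8, scaled by the row's absolute maximum."""
        B = B.to(torch.float16)  # recast to fp16 first, as in the patch
        SCB = B.abs().amax(dim=1, keepdim=True).float()  # per-row absmax scale
        CB = torch.round(B.float() * 127.0 / SCB).to(torch.int8)  # int8 codes
        return CB, SCB.squeeze(1)

    # Usage: a bf16 weight, as might come out of mixed-precision training.
    B = torch.randn(4, 8, dtype=torch.bfloat16)
    CB, SCB = rowwise_absmax_int8(B)
    B_restored = CB.float() * SCB.unsqueeze(1) / 127.0  # dequantize to check error
    print((B.float() - B_restored).abs().max())

Without the initial cast, a bf16 or fp32 tensor would flow into a path written for fp16; casting at the call site fixes this for every dtype B might carry, which is why the patch touches the F.double_quant call rather than the callers.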