path: root/bitsandbytes/autograd/_functions.py
Diffstat (limited to 'bitsandbytes/autograd/_functions.py')
-rw-r--r--  bitsandbytes/autograd/_functions.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 6d473e9..f4a6d57 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -232,8 +232,8 @@ class MatMul8bitLt(torch.autograd.Function):
         # Cast A to fp16
         A_dtype = A.dtype
         if A_dtype != torch.float16:
-            warnings.warn(f"MatMul8bitLt: temporarily casting input matrix from {A_dtype} to float16")
-            A = A.to(torch.float16)
+            warnings.warn(f"MatMul8bitLt: input matrix will be converted from {A_dtype} to float16")
+            A = A.to(torch.float16)
 
         # 1. Quantize A
         if len(A.shape) == 3:
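
For context, a minimal, self-contained sketch of what the changed lines do before quantization: if the input matrix is not already float16, the (reworded) warning is emitted and the tensor is cast. The helper name _cast_input_to_fp16 is hypothetical and not part of the bitsandbytes API; this is only an illustration of the hunk's behavior.

import warnings
import torch

def _cast_input_to_fp16(A: torch.Tensor) -> torch.Tensor:
    # Mirrors the changed lines: warn, then cast, when the input is not fp16.
    A_dtype = A.dtype
    if A_dtype != torch.float16:
        warnings.warn(f"MatMul8bitLt: input matrix will be converted from {A_dtype} to float16")
        A = A.to(torch.float16)
    return A

# Example: a float32 input triggers the warning and comes back as float16.
A = torch.randn(4, 8, dtype=torch.float32)
A = _cast_input_to_fp16(A)
assert A.dtype == torch.float16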