From ef2936a90d903d0f9a27e16ecb7f839f2c4d9ba1 Mon Sep 17 00:00:00 2001
From: dbaranchuk
Date: Wed, 24 Aug 2022 01:33:04 +0300
Subject: delete CxB from state

---
 bitsandbytes/nn/modules.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/bitsandbytes/nn/modules.py b/bitsandbytes/nn/modules.py
index 360a182..03ffd3b 100644
--- a/bitsandbytes/nn/modules.py
+++ b/bitsandbytes/nn/modules.py
@@ -260,11 +260,10 @@ class Linear8bitLt(nn.Linear):
 
         out = bnb.matmul(x, self.weight, bias=self.bias, state=self.state)
 
-        # if not self.state.has_fp16_weights and self.state.CB is not None:
-        #     we converted 8-bit row major to turing/ampere format in the first inference pass
-        #     we no longer need the row-major weight
-        #     del self.state.CB
-        #     self.weight.data = self.state.CxB
+        if not self.state.has_fp16_weights and self.state.CxB is not None:
+            # In this version, we convert 8-bit row major to turing/ampere format at each inference pass
+            # Thus, we delete CxB from the state. TODO: do not store it in the state in the first place.
+            del self.state.CxB
 
         return out
 
--
cgit v1.2.3
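
For context, a minimal usage sketch of the code path this patch touches (not part of the commit). The layer sizes, threshold value, and the final assertion are illustrative assumptions. With has_fp16_weights=False, bnb.matmul rebuilds the turing/ampere-format weight buffer (CxB) from the row-major int8 copy (CB) on each inference pass, so the patched forward frees state.CxB right after the matmul instead of caching it in the state.

# Sketch: exercising Linear8bitLt with int8 weights so the new cleanup branch runs.
# Assumes a CUDA device and the bitsandbytes API of this era; sizes are arbitrary.
import torch
import bitsandbytes as bnb

linear = bnb.nn.Linear8bitLt(
    1024, 4096,
    bias=True,
    has_fp16_weights=False,  # keep weights as int8 row-major (CB)
    threshold=6.0,           # outlier threshold for mixed int8/fp16 decomposition
).cuda()                     # quantizes the weight on transfer to the GPU

x = torch.randn(8, 1024, dtype=torch.float16, device="cuda")
out = linear(x)              # forward builds CxB from CB, then deletes state.CxB

# MatmulLtState declares CxB = None at class level, so after `del self.state.CxB`
# the lookup falls back to None rather than raising AttributeError.
assert getattr(linear.state, "CxB", None) is None
print(out.shape)             # torch.Size([8, 4096])

The trade-off is repeated conversion cost per forward pass in exchange for not holding two copies of the weight (row-major CB plus tiled CxB) in GPU memory between calls; the in-code TODO notes that not storing CxB in the state at all would be the cleaner fix.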