author    Max Ryabinin <mryabinin0@gmail.com>  2022-08-24 18:43:18 +0300
committer Max Ryabinin <mryabinin0@gmail.com>  2022-08-24 18:43:18 +0300
commit    9fc0ab415c564d278b673c694de9b884ea1121d2 (patch)
tree      21686a913ca9801ddf9b5a31008550967ec822e6 /bitsandbytes/autograd
parent    9d60b3c5279641ba936facd710c722ebe52fcf40 (diff)
Remove unused code
Diffstat (limited to 'bitsandbytes/autograd')
-rw-r--r--  bitsandbytes/autograd/_functions.py  4
1 file changed, 0 insertions, 4 deletions
diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 4dbf129..be975f6 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -1,6 +1,5 @@
 import operator
 import torch
-import bitsandbytes as bnb
 import bitsandbytes.functional as F
 
 from dataclasses import dataclass
@@ -378,9 +377,6 @@ class MatMul8bitLt(torch.autograd.Function):
         return grad_A, grad_B, None, grad_bias, None
 
 
-matmul = MatMul8bitLt.apply
-
-
 def matmul(
     A: tensor,
     B: tensor,
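
Note on the change: the removed module-level binding `matmul = MatMul8bitLt.apply` was dead code, because the `def matmul(...)` defined immediately below rebinds the same name before the alias could ever be used. The sketch below illustrates that shadowing pattern in a self-contained, runnable form; the autograd function body and the wrapper signature are simplified stand-ins, not the actual bitsandbytes implementation (the real signature is truncated in the hunk above and takes further parameters not shown here).

import torch


class MatMul8bitLt(torch.autograd.Function):
    # Toy stand-in for the real 8-bit matmul: a plain float matmul so the
    # example stays self-contained and runnable.
    @staticmethod
    def forward(ctx, A, B):
        ctx.save_for_backward(A, B)
        return A @ B

    @staticmethod
    def backward(ctx, grad_output):
        A, B = ctx.saved_tensors
        return grad_output @ B.t(), A.t() @ grad_output


# The line this commit deletes bound the public name once...
matmul = MatMul8bitLt.apply


# ...but this module-level definition immediately rebinds `matmul`,
# so the alias above was unreachable, hence "Remove unused code".
def matmul(A: torch.Tensor, B: torch.Tensor) -> torch.Tensor:
    # Illustrative wrapper only; the real bitsandbytes wrapper accepts
    # additional arguments omitted from the truncated hunk above.
    return MatMul8bitLt.apply(A, B)


if __name__ == "__main__":
    A = torch.randn(2, 3, requires_grad=True)
    B = torch.randn(3, 4, requires_grad=True)
    matmul(A, B).sum().backward()  # dispatches to the wrapper, not the removed alias
    print(A.grad.shape, B.grad.shape)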