Commit 35dbb2e

more cleanup
matthewdouglas committed Nov 4, 2024
1 parent b954474 commit 35dbb2e
Showing 3 changed files with 3 additions and 13 deletions.
6 changes: 3 additions & 3 deletions bitsandbytes/functional.py
@@ -2388,9 +2388,9 @@ def int8_linear_matmul(A: torch.Tensor, B: torch.Tensor, out: Optional[torch.Ten
     if has_error:
         raise RuntimeError(
             f"cublasLt ran into an error!\n"
-            f"\tA: {shapeA}, B: {shapeB}, C: {shapeC}\n"
-            f"\t(lda, ldb, ldc): {(lda, ldb, ldc)}\n"
-            f"\t(m, n, k): {(m, n, k)}"
+            f"\t{shapeA=}, {shapeB=}, {shapeC=}\n"
+            f"\t{(lda, ldb, ldc)=}\n"
+            f"\t{(m, n, k)=}"
         )
 
     return out
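The new message uses Python 3.8+ self-documenting f-string expressions: f"{expr=}" prints the expression text followed by its value, so the labels can no longer drift out of sync with the variables they describe. A minimal before/after sketch (the shape values are made up for illustration):

    shapeA, shapeB, shapeC = (16, 64), (32, 64), (16, 32)  # hypothetical shapes

    # Old style: labels written out by hand
    print(f"A: {shapeA}, B: {shapeB}, C: {shapeC}")
    # A: (16, 64), B: (32, 64), C: (16, 32)

    # New style: {expr=} repeats the expression verbatim before the value
    print(f"{shapeA=}, {shapeB=}, {shapeC=}")
    # shapeA=(16, 64), shapeB=(32, 64), shapeC=(16, 32)

The same syntax works for arbitrary expressions, which is why f"\t{(m, n, k)=}" can replace the hand-written "(m, n, k): ..." label above.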
2 changes: 0 additions & 2 deletions bitsandbytes/research/autograd/_functions.py
@@ -256,8 +256,6 @@ def forward(ctx, A, B, out=None, bias=None, state: Optional[MatmulLtState] = Non
         if outlier_cols is not None and not state.has_fp16_weights:
             # extract outliers
             state.idx = outlier_cols
-
-            # outliers = F.extract_outliers(state.CxB, state.SB, state.idx.int())
             outliers = state.CB[:, state.idx.long()].clone()
             state.subB = (outliers * state.SCB.view(-1, 1) / 127.0).t().contiguous().to(A.dtype)
             CA[:, state.idx.long()] = 0
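For context, state.CB holds the row-wise int8-quantized weights and state.SCB the per-row absmax statistics, so multiplying the selected columns by SCB / 127 dequantizes the outliers back to the compute dtype. A self-contained sketch of that dequantization step (shapes and values are illustrative, not taken from the library):

    import torch

    CB = torch.randint(-127, 128, (64, 128), dtype=torch.int8)  # hypothetical int8 weight matrix
    SCB = torch.rand(64, dtype=torch.float32) * 10              # hypothetical per-row absmax values
    idx = torch.tensor([3, 17, 42])                             # hypothetical outlier column indices

    # Slice out the outlier columns and rescale: int8 value * absmax / 127 ~ original weight
    outliers = CB[:, idx.long()].clone()
    subB = (outliers * SCB.view(-1, 1) / 127.0).t().contiguous().to(torch.float16)
    print(subB.shape)  # torch.Size([3, 64]): one dequantized row per outlier column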
8 changes: 0 additions & 8 deletions tests/test_autograd.py
@@ -255,14 +255,6 @@ def test_matmullt(dim1, dim2, dim3, dim4, funcs, dtype, req_grad, transpose, dec
         B2 = B2.t().contiguous()
 
         state.CB, state.SCB, _ = bnb.functional.int8_vectorwise_quant(B2.to(torch.float16))
-
-        # (
-        #     state.CB,
-        #     CBt,
-        #     state.SCB,
-        #     SCBt,
-        #     coo_tensorB,
-        # ) = bnb.functional.double_quant(B2.to(torch.float16))
         B2 = state.CB
 
         if not transpose[0] and transpose[1]:
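The removed double_quant call returned both row-wise and column-wise quantized copies plus a sparse outlier tensor; the test only needs the row-wise artifacts, which int8_vectorwise_quant provides directly. A usage sketch, assuming the three-value return of (quantized tensor, row-wise absmax, outlier columns) and a CUDA device:

    import torch
    import bitsandbytes as bnb

    B = torch.randn(32, 64, dtype=torch.float16, device="cuda")
    CB, SCB, outlier_cols = bnb.functional.int8_vectorwise_quant(B)

    # Round trip: each row was scaled by 127 / absmax(row), so invert that
    B_deq = CB.float() * SCB.view(-1, 1) / 127.0
    print(torch.allclose(B.float(), B_deq, atol=0.1))  # True up to quantization error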
