
Commit

remove redundant comments
zhuhong61 authored Oct 30, 2024
1 parent c4a00a4 commit 4920332
Showing 1 changed file with 0 additions and 2 deletions.
bitsandbytes/autograd/_functions.py (2 changes: 0 additions & 2 deletions)
@@ -517,7 +517,6 @@ def forward(ctx, A, B, out=None, bias=None, quant_state: Optional[F.QuantState]

         # 1. Dequantize
         # 2. MatmulnN
-        print("*******quant_state absmax: ", quant_state.absmax)
         output = torch.nn.functional.linear(A, F.dequantize_4bit(B, quant_state).to(A.dtype).t(), bias)

         # 3. Save state
@@ -596,5 +595,4 @@ def matmul_4bit(
                 out += bias
             return out
     else:
-        print("^^^^^^^^^grad seperate path^^^^^^^^^")
         return MatMul4Bit.apply(A, B, out, bias, quant_state)
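
For context, the forward pass the first hunk touches follows a two-step pattern: dequantize the packed 4-bit weight, then run a standard linear. A minimal sketch of that same pattern at the library surface is below; the shapes, variable names, and CUDA device are illustrative assumptions, not part of this commit, while F.quantize_4bit and F.dequantize_4bit are the helpers from bitsandbytes.functional that the diff itself uses.

import torch
import bitsandbytes.functional as F

# Assumed shapes and device for illustration; the 4-bit kernels require CUDA.
W = torch.randn(128, 64, dtype=torch.float16, device="cuda")  # weight (out, in)
A = torch.randn(4, 64, dtype=torch.float16, device="cuda")    # activations

# Quantize the weight to 4-bit; quant_state carries absmax (the value the
# deleted print inspected), blocksize, shape, and dtype for dequantization.
W4, quant_state = F.quantize_4bit(W)

# 1. Dequantize, 2. Matmul: the same two steps as the forward shown above.
out = torch.nn.functional.linear(A, F.dequantize_4bit(W4, quant_state).to(A.dtype))

The second hunk shows where the other deleted print sat: matmul_4bit takes a fast inference path when no gradient is needed and otherwise falls through to MatMul4Bit.apply, the autograd function whose forward performs the dequantize-and-linear steps sketched here.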
