fix QLoRA mem bug: delete useless buffered activation (bitsandbytes-foundation#1270)

* chore: delete useless buffered activation

* fix: fix bugs
Ther-nullptr authored Jul 16, 2024
1 parent 39b42e7 commit 9e75374
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions bitsandbytes/autograd/_functions.py
@@ -513,7 +513,7 @@ def forward(ctx, A, B, out=None, bias=None, quant_state: Optional[F.QuantState]
         ctx.dtype_A, ctx.dtype_B, ctx.dtype_bias = A.dtype, B.dtype, None if bias is None else bias.dtype

         if any(ctx.needs_input_grad[:2]):
-            ctx.tensors = (A, B)
+            ctx.tensors = (None, B)
         else:
             ctx.tensors = (None, None)

@@ -526,7 +526,7 @@ def backward(ctx, grad_output):
             return torch.zeros_like(ctx.A), torch.zeros_like(ctx.B), None, bias_grad, None

         req_gradA, _, _, req_gradBias, _ = ctx.needs_input_grad
-        A, B = ctx.tensors
+        _, B = ctx.tensors

         grad_A, grad_B, grad_bias = None, None, None

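Why this saves memory: in QLoRA the 4-bit base weight B is frozen and never receives a gradient, so the backward pass only needs B (to compute grad_A); the input activation A saved in ctx.tensors was kept alive for nothing, inflating peak memory by a full activation per linear layer. The snippet below is a minimal, self-contained sketch of that idea using a plain float weight and a hypothetical class name (MatMulFrozenWeight) rather than the actual bitsandbytes MatMul4Bit code; it only illustrates why dropping A from the saved tensors is safe when the weight is frozen.

import torch


class MatMulFrozenWeight(torch.autograd.Function):
    """Toy autograd function: the weight W is frozen (QLoRA-style),
    so grad_A only needs W and the activation A is never stashed."""

    @staticmethod
    def forward(ctx, A, W):
        # Save only the (frozen) weight for backward; deliberately
        # drop the potentially large activation A.
        ctx.save_for_backward(W)
        return A @ W.t()

    @staticmethod
    def backward(ctx, grad_output):
        (W,) = ctx.saved_tensors
        grad_A = grad_output @ W  # dL/dA = dL/dY @ W
        return grad_A, None       # no gradient for the frozen weight


if __name__ == "__main__":
    A = torch.randn(8, 16, requires_grad=True)
    W = torch.randn(32, 16)  # frozen weight, requires_grad=False
    y = MatMulFrozenWeight.apply(A, W)
    y.sum().backward()
    print(A.grad.shape)  # torch.Size([8, 16])

The same reasoning applies to the patched code: since req_gradB is never set for the quantized weight, only B has to survive until backward, and replacing (A, B) with (None, B) releases the activation as soon as forward returns.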
