[Bug] Fix usage of .transpose() and .view() consecutively. (#11979)
liaoyanqing666 authored Jan 13, 2025
1 parent f7b3ba8 commit 9dd02d8
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion vllm/attention/layer.py
@@ -230,7 +230,7 @@ def forward(
                                  value,
                                  scale=self.scale)
        out = out.transpose(1, 2)
-       return out.view(bsz, q_len, -1)
+       return out.reshape(bsz, q_len, -1)


def unified_attention(
2 changes: 1 addition & 1 deletion vllm/model_executor/models/intern_vit.py
@@ -271,7 +271,7 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
        v = v.transpose(1, 2)

        x = F.scaled_dot_product_attention(q, k, v, scale=self.scale)
-       x = x.transpose(1, 2).view(B, N, -1)
+       x = x.transpose(1, 2).reshape(B, N, -1)

x = self.proj(x)
return x
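
For context, a minimal standalone sketch of why the change is needed (not part of the commit; the shapes are made up for illustration): transpose() returns a non-contiguous view of the same storage, and Tensor.view() requires a layout it can reinterpret without copying, so chaining them raises a RuntimeError. reshape() accepts non-contiguous input and copies data only when necessary.

import torch

# Illustrative shapes only: transpose() produces a non-contiguous view,
# so view() on its result fails, while reshape() succeeds.
x = torch.randn(2, 4, 8, 16)      # (bsz, num_heads, q_len, head_dim)
out = x.transpose(1, 2)           # (bsz, q_len, num_heads, head_dim), non-contiguous

try:
    out.view(2, 8, -1)            # view() needs a compatible (contiguous) layout
except RuntimeError as err:
    print("view() failed:", err)

# reshape() handles non-contiguous input, copying only if it has to.
merged = out.reshape(2, 8, -1)    # (bsz, q_len, num_heads * head_dim)
print(merged.shape)               # torch.Size([2, 8, 64])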
