fix shape64 (#14376)
wanghuancoder authored Dec 12, 2024
1 parent 78e7184 commit f49dec9
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions ppocr/modeling/heads/rec_sar_head.py
@@ -242,14 +242,14 @@ def _2d_attention(self, decoder_input, feat, holistic_feat, valid_ratios=None):
         # bsz * (seq_len + 1) * h * w * attn_size
         attn_weight = self.conv1x1_2(attn_weight)
         # bsz * (seq_len + 1) * h * w * 1
-        bsz, T, h, w, c = paddle.shape(attn_weight).astype("int32")
+        bsz, T, h, w, c = paddle.shape(attn_weight)
         assert c == 1

         if valid_ratios is not None:
             # cal mask of attention weight
             for i in range(valid_ratios.shape[0]):
                 valid_width = paddle.minimum(
-                    w, paddle.ceil(valid_ratios[i] * w).astype("int32")
+                    w, paddle.ceil(valid_ratios[i] * w).astype("int64")
                 )
                 if valid_width < w:
                     attn_weight[i, :, :, valid_width:, :] = float("-inf")
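
Note on the change: the commit title suggests this is about 64-bit shape handling. The old code cast the result of paddle.shape to int32 and the ceil'd valid width to int32; the new code keeps the shape tensor's native dtype and casts the ceil'd width to int64 instead, presumably so both operands of paddle.minimum share a dtype when shapes are reported as int64. The snippet below is a minimal standalone sketch of that masking arithmetic, not part of the commit; the tensor sizes and the valid_ratio value are illustrative only, and it uses w.dtype rather than a hard-coded "int64" so it runs whichever dtype paddle.shape returns.

    # Minimal sketch (illustrative, not from the commit) of the valid-width
    # computation in _2d_attention.
    import paddle

    attn_weight = paddle.rand([2, 6, 8, 16, 1])   # bsz, T, h, w, c
    bsz, T, h, w, c = paddle.shape(attn_weight)   # dims come back as int tensors
    valid_ratio = paddle.to_tensor(0.75)          # stand-in for valid_ratios[i]

    # Cast the ceil'd width to w's own dtype so paddle.minimum compares like
    # with like; the actual fix pins "int64" for the 64-bit shape case.
    valid_width = paddle.minimum(
        w, paddle.ceil(valid_ratio * w.astype("float32")).astype(w.dtype)
    )
    print(int(valid_width))  # min(16, ceil(0.75 * 16)) == 12

Columns at index valid_width and beyond are then masked with float("-inf") before the softmax over the attention map, as in the diff above.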
