From 550022ea663df53f62d199954c328493043ec1e0 Mon Sep 17 00:00:00 2001
From: LDOUBLEV
Date: Mon, 1 Feb 2021 06:44:04 +0000
Subject: [PATCH] fix comment

---
 ppocr/data/imaug/label_ops.py        |  2 +-
 ppocr/modeling/heads/rec_att_head.py | 15 ---------------
 2 files changed, 1 insertion(+), 16 deletions(-)

diff --git a/ppocr/data/imaug/label_ops.py b/ppocr/data/imaug/label_ops.py
index 191bda92..26ac4d81 100644
--- a/ppocr/data/imaug/label_ops.py
+++ b/ppocr/data/imaug/label_ops.py
@@ -211,7 +211,7 @@ class AttnLabelEncode(BaseRecLabelEncode):
         text = self.encode(text)
         if text is None:
             return None
-        if len(text) > self.max_text_len:
+        if len(text) >= self.max_text_len:
             return None
         data['length'] = np.array(len(text))
         text = [0] + text + [len(self.character) - 1] + [0] * (self.max_text_len
diff --git a/ppocr/modeling/heads/rec_att_head.py b/ppocr/modeling/heads/rec_att_head.py
index d01f0e6c..9f065d61 100644
--- a/ppocr/modeling/heads/rec_att_head.py
+++ b/ppocr/modeling/heads/rec_att_head.py
@@ -194,18 +194,3 @@ class AttentionLSTMCell(nn.Layer):
         cur_hidden = self.rnn(concat_context, prev_hidden)
 
         return cur_hidden, alpha
-
-
-if __name__ == '__main__':
-    paddle.disable_static()
-
-    model = Attention(100, 200, 10)
-
-    x = np.random.uniform(-1, 1, [2, 10, 100]).astype(np.float32)
-    y = np.random.randint(0, 10, [2, 21]).astype(np.int32)
-
-    xp = paddle.to_tensor(x)
-    yp = paddle.to_tensor(y)
-
-    res = model(inputs=xp, targets=yp, is_train=True, batch_max_length=20)
-    print("res: ", res.shape)
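
Reviewer note (not part of the patch): despite the "fix comment" subject, the label_ops.py hunk is an off-by-one fix. AttnLabelEncode wraps the encoded text with a start token and an end token (`len(self.character) - 1`) before padding to a fixed length, so a sample whose encoded length already equals max_text_len leaves no room for those markers; the stricter `>=` check drops such samples. The rec_att_head.py hunk simply removes an ad-hoc `__main__` smoke test from the head module. Below is a minimal standalone sketch of the boundary case; the label layout is inferred from the padding expression visible in the hunk, and the `max_text_len` and `eos` values are made up for illustration:

    max_text_len = 5           # fixed label budget, as in AttnLabelEncode
    eos = 9                    # stand-in for len(self.character) - 1

    def build_label(encoded, reject):
        # Mirrors: [0] + text + [eos] + [0] * (max_text_len - len(text) - 2)
        if reject(len(encoded)):
            return None        # sample filtered out as too long
        return [0] + encoded + [eos] + [0] * (max_text_len - len(encoded) - 2)

    text = [1, 2, 3, 4, 5]     # exactly max_text_len symbols after encoding

    # Old check (>): the boundary sample slips through, and the negative
    # repeat count silently yields an empty padding list, so the label comes
    # out max_text_len + 2 elements long rather than the intended fixed size.
    print(build_label(text, lambda n: n > max_text_len))   # 7 elements
    # New check (>=): the boundary sample is rejected instead.
    print(build_label(text, lambda n: n >= max_text_len))  # None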