From edc9fa3be8f823bf4d2252f8c4cdb75418fecef0 Mon Sep 17 00:00:00 2001 From: zhangyubo0722 Date: Fri, 27 Sep 2024 17:19:27 +0000 Subject: [PATCH] fix latexocr bug --- ppocr/modeling/backbones/rec_resnetv2.py | 4 ++++ ppocr/modeling/heads/rec_latexocr_head.py | 3 +++ ppocr/utils/export_model.py | 1 + ppocr/utils/save_load.py | 6 +++++- 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/ppocr/modeling/backbones/rec_resnetv2.py b/ppocr/modeling/backbones/rec_resnetv2.py index ef4ea438e1..476e3baa62 100644 --- a/ppocr/modeling/backbones/rec_resnetv2.py +++ b/ppocr/modeling/backbones/rec_resnetv2.py @@ -89,6 +89,8 @@ def __init__( self.eps = eps def forward(self, x): + if not self.training: + self.export = True if self.same_pad: if self.export: x = pad_same_export(x, self._kernel_size, self._stride, self._dilation) @@ -201,6 +203,8 @@ def __init__( ) def forward(self, x): + if not self.training: + self.export = True if self.export: x = pad_same_export(x, self.ksize, self.stride, value=-float("inf")) else: diff --git a/ppocr/modeling/heads/rec_latexocr_head.py b/ppocr/modeling/heads/rec_latexocr_head.py index 1484f87b12..cab1b8a5a1 100644 --- a/ppocr/modeling/heads/rec_latexocr_head.py +++ b/ppocr/modeling/heads/rec_latexocr_head.py @@ -342,6 +342,8 @@ def forward( mem=None, seq_len=0, ): + if not self.training: + self.is_export = True b, n, _, h, talking_heads, collab_heads, has_context = ( *x.shape, self.heads, @@ -987,6 +989,7 @@ def generate_export( # forward for export def forward(self, inputs, targets=None): if not self.training: + self.is_export = True encoded_feat = inputs batch_num = encoded_feat.shape[0] bos_tensor = paddle.full([batch_num, 1], self.bos_token, dtype=paddle.int64) diff --git a/ppocr/utils/export_model.py b/ppocr/utils/export_model.py index ce4c16e5ce..a62e8109a2 100644 --- a/ppocr/utils/export_model.py +++ b/ppocr/utils/export_model.py @@ -70,6 +70,7 @@ def dump_infer_config(config, path, logger): if
hpi_config["Hpi"]["backend_config"].get("tensorrt", None): hpi_config["Hpi"]["supported_backends"]["gpu"].remove("tensorrt") del hpi_config["Hpi"]["backend_config"]["tensorrt"] + hpi_config["Hpi"]["selected_backends"]["gpu"] = "paddle_infer" infer_cfg["Hpi"] = hpi_config["Hpi"] if config["Global"].get("pdx_model_name", None): infer_cfg["Global"] = {} diff --git a/ppocr/utils/save_load.py b/ppocr/utils/save_load.py index f7ee432271..afd7c6ad97 100644 --- a/ppocr/utils/save_load.py +++ b/ppocr/utils/save_load.py @@ -304,6 +304,8 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num= metric_score = metric_info["metric"]["acc"] elif "precision" in metric_info["metric"]: metric_score = metric_info["metric"]["precision"] + elif "exp_rate" in metric_info["metric"]: + metric_score = metric_info["metric"]["exp_rate"] else: raise ValueError("No metric score found.") train_results["models"]["best"]["score"] = metric_score @@ -326,8 +328,10 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num= metric_score = metric_info["metric"]["acc"] elif "precision" in metric_info["metric"]: metric_score = metric_info["metric"]["precision"] + elif "exp_rate" in metric_info["metric"]: + metric_score = metric_info["metric"]["exp_rate"] else: - raise ValueError("No metric score found.") + metric_score = 0 train_results["models"][f"last_{1}"]["score"] = metric_score for tag in save_model_tag: train_results["models"][f"last_{1}"][tag] = os.path.join(