From 87635b03d29dd5dc071f1a31fb60f85b70eafd6b Mon Sep 17 00:00:00 2001
From: wangyu096 <wangyu096@163.com>
Date: Thu, 6 Jun 2024 14:42:41 +0800
Subject: [PATCH] feat: add a new execution history archiving mode - backup only, no deletion #3037
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../job/backup/dao/impl/ExecuteArchiveDAOImpl.java | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/src/backend/job-backup/service-job-backup/src/main/java/com/tencent/bk/job/backup/dao/impl/ExecuteArchiveDAOImpl.java b/src/backend/job-backup/service-job-backup/src/main/java/com/tencent/bk/job/backup/dao/impl/ExecuteArchiveDAOImpl.java
index 33ceaea286..aab26f4a2b 100644
--- a/src/backend/job-backup/service-job-backup/src/main/java/com/tencent/bk/job/backup/dao/impl/ExecuteArchiveDAOImpl.java
+++ b/src/backend/job-backup/service-job-backup/src/main/java/com/tencent/bk/job/backup/dao/impl/ExecuteArchiveDAOImpl.java
@@ -31,6 +31,7 @@
 import org.jooq.Loader;
 import org.jooq.LoaderError;
 import org.jooq.TableRecord;
+import org.jooq.impl.DSL;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,11 +55,11 @@ public ExecuteArchiveDAOImpl(DSLContext context) {
     public Integer batchInsert(List<? extends TableRecord<?>> recordList, int bulkSize) throws IOException {
         long start = System.currentTimeMillis();
         int successInsertedRecords = 0;
-        String table = recordList.get(0).getTable().getName();
+        String tableName = recordList.get(0).getTable().getName();
         boolean success = true;
         try {
             Loader<?> loader =
-                context.loadInto(recordList.get(0).getTable())
+                context.loadInto(DSL.table(tableName))
                     // Because this is a bulk insert, jooq does not allow onDuplicateKeyIgnore/onDuplicateKeyUpdate;
                     // otherwise it fails with "Cannot apply bulk loading with onDuplicateKey flags".
                     // Use onDuplicateKeyError for now; the follow-up logic decides whether the error is a primary key conflict.
@@ -72,7 +73,7 @@ public Integer batchInsert(List<? extends TableRecord<?>> recordList, int bulkSi
             String bulkInsertResult = successInsertedRecords == recordList.size() ? "success" : "fail";
             log.info(
                 "InsertBulk: Load {} data|result|{}|executed|{}|processed|{}|stored|{}|ignored|{}|errors|{}",
-                table,
+                tableName,
                 bulkInsertResult,
                 loader.executed(),
                 loader.processed(),
@@ -82,7 +83,7 @@
             );
             if (CollectionUtils.isNotEmpty(loader.errors())) {
                 for (LoaderError error : loader.errors()) {
-                    ARCHIVE_FAILED_LOGGER.error("Error while load {} data, exception: {}, error row: {}", table,
+                    ARCHIVE_FAILED_LOGGER.error("Error while load {} data, exception: {}, error row: {}", tableName,
                         error.exception().getMessage(), error.row());
                 }
                 if (hasDuplicateError(loader.errors())) {
@@ -91,12 +92,12 @@
                 }
             }
         } catch (IOException e) {
-            String errorMsg = String.format("Error while loading %s data!", table);
+            String errorMsg = String.format("Error while loading %s data!", tableName);
             log.error(errorMsg, e);
             success = false;
             throw e;
         } finally {
-            log.info("Load data to {} done! success: {}, total: {}, inserted: {}, cost: {}ms", table, success,
+            log.info("Load data to {} done! success: {}, total: {}, inserted: {}, cost: {}ms", tableName, success,
                 recordList.size(), successInsertedRecords, System.currentTimeMillis() - start);
         }
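
Below is a minimal, self-contained sketch (not part of the patch) of the jOOQ bulk-loading pattern the change switches to: the loader targets the table resolved by name via DSL.table(tableName), keeps the default onDuplicateKeyError handling because bulk loading cannot be combined with onDuplicateKeyIgnore/onDuplicateKeyUpdate, and inspects loader.errors() afterwards. The BulkArchiveLoader class, the bulkAfter(...)/fieldsCorresponding() configuration, and the "Duplicate entry" message check are illustrative assumptions standing in for code the diff elides (for example the real hasDuplicateError logic in ExecuteArchiveDAOImpl).

import java.io.IOException;
import java.util.List;

import org.jooq.DSLContext;
import org.jooq.Loader;
import org.jooq.LoaderError;
import org.jooq.TableRecord;
import org.jooq.impl.DSL;

public class BulkArchiveLoader {

    private final DSLContext context;

    public BulkArchiveLoader(DSLContext context) {
        this.context = context;
    }

    // Bulk-inserts archive records into the target table resolved by name,
    // mirroring the loadInto(DSL.table(tableName)) call introduced by the patch.
    public int batchInsert(List<? extends TableRecord<?>> recordList, int bulkSize) throws IOException {
        String tableName = recordList.get(0).getTable().getName();

        // jOOQ rejects onDuplicateKeyIgnore/onDuplicateKeyUpdate together with bulk
        // loading, so keep the default onDuplicateKeyError and check errors afterwards.
        Loader<?> loader = context.loadInto(DSL.table(tableName))
            .onDuplicateKeyError()
            .bulkAfter(bulkSize)
            .loadRecords(recordList)
            .fieldsCorresponding()
            .execute();

        for (LoaderError error : loader.errors()) {
            // Assumption: treat MySQL-style "Duplicate entry" messages as duplicate-key
            // conflicts; the DAO's real hasDuplicateError(...) check may differ.
            String message = error.exception().getMessage();
            boolean duplicate = message != null && message.contains("Duplicate entry");
            if (!duplicate) {
                throw new IOException("Bulk load into " + tableName + " failed", error.exception());
            }
        }
        // Number of rows actually stored in the target table.
        return loader.stored();
    }
}

The only destination-related change visible in the patch itself is that the loader now writes into DSL.table(tableName) instead of the records' generated table object; everything else in the sketch is scaffolding to make the pattern runnable.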