From 9417011cb37b182a5fa2fca437aa961fdacf4ad2 Mon Sep 17 00:00:00 2001
From: North Lin <240383347@qq.com>
Date: Fri, 19 Jul 2024 17:39:41 +0800
Subject: [PATCH 01/11] [FLINK-34865][pipeline-connector]support sync table and
column comments
---
.../connectors/pipeline-connectors/mysql.md | 7 ++
.../connectors/pipeline-connectors/mysql.md | 8 ++
.../flink/cdc/common/schema/Schema.java | 1 +
.../flink/FlinkPipelineComposerITCase.java | 35 +++----
.../pom.xml | 2 +-
.../doris/factory/DorisDataSinkFactory.java | 2 +
.../doris/sink/DorisDataSinkOptions.java | 6 ++
.../doris/sink/DorisMetadataApplier.java | 5 +-
.../mysql/factory/MySqlDataSourceFactory.java | 12 +++
.../mysql/source/MySqlDataSource.java | 13 ++-
.../mysql/source/MySqlDataSourceOptions.java | 9 ++
.../mysql/source/MySqlEventDeserializer.java | 8 +-
.../parser/CustomMySqlAntlrDdlParser.java | 5 +-
.../reader/MySqlPipelineRecordEmitter.java | 13 ++-
.../mysql/source/MySqlPipelineITCase.java | 92 +++++++++++++++++++
15 files changed, 193 insertions(+), 25 deletions(-)
diff --git a/docs/content.zh/docs/connectors/pipeline-connectors/mysql.md b/docs/content.zh/docs/connectors/pipeline-connectors/mysql.md
index e7e53f396b8..4a2a77295b4 100644
--- a/docs/content.zh/docs/connectors/pipeline-connectors/mysql.md
+++ b/docs/content.zh/docs/connectors/pipeline-connectors/mysql.md
@@ -268,6 +268,13 @@ pipeline:
是否在快照结束后关闭空闲的 Reader。 此特性需要 flink 版本大于等于 1.14 并且 'execution.checkpointing.checkpoints-after-tasks-finish.enabled' 需要设置为 true。
若 flink 版本大于等于 1.15,'execution.checkpointing.checkpoints-after-tasks-finish.enabled' 默认值变更为 true,可以不用显式配置 'execution.checkpointing.checkpoints-after-tasks-finish.enabled' = true。 |
+
+ | include-comments.enabled |
+ optional |
+ false |
+ Boolean |
+ 是否启用同步表、字段注释特性,默认关闭。注意:开启此特性将会对内存使用产生影响。 |
+
diff --git a/docs/content/docs/connectors/pipeline-connectors/mysql.md b/docs/content/docs/connectors/pipeline-connectors/mysql.md
index 834b51070f0..3c755e551ee 100644
--- a/docs/content/docs/connectors/pipeline-connectors/mysql.md
+++ b/docs/content/docs/connectors/pipeline-connectors/mysql.md
@@ -275,6 +275,14 @@ pipeline:
so it does not need to be explicitly configured 'execution.checkpointing.checkpoints-after-tasks-finish.enabled' = 'true'
+
+ | include-comments.enabled |
+ optional |
+ false |
+ Boolean |
+ Whether to include table and column comments; defaults to false. If set to true, table and column comments will be synchronized.
+ Note: Enabling this option will have implications on memory usage. |
+
diff --git a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
index 62657be8855..abdc48ff1af 100644
--- a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
+++ b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
@@ -230,6 +230,7 @@ public String toString() {
if (!partitionKeys.isEmpty()) {
sb.append(", partitionKeys=").append(String.join(";", partitionKeys));
}
+ sb.append(", comment=").append(comment);
sb.append(", options=").append(describeOptions());
return sb.toString();
diff --git a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
index 26c9c918751..85433247970 100644
--- a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
+++ b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
@@ -69,6 +69,7 @@
/** Integration test for {@link FlinkPipelineComposer}. */
class FlinkPipelineComposerITCase {
private static final int MAX_PARALLELISM = 4;
+ private static final String LINE_SEPARATOR = System.lineSeparator();
// Always use parent-first classloader for CDC classes.
// The reason is that ValuesDatabase uses static field for holding data, we need to make sure
@@ -153,10 +154,10 @@ void testSingleSplitSingleTable(ValuesDataSink.SinkApi sinkApi) throws Exception
"default_namespace.default_schema.table1:col1=3;newCol3=");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -215,11 +216,11 @@ void testSingleSplitMultipleTables(ValuesDataSink.SinkApi sinkApi) throws Except
"default_namespace.default_schema.table2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -322,10 +323,10 @@ void testTransform(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 10], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 20], op=INSERT, meta=()}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=LAST, existedColumnName=null}]}",
@@ -389,10 +390,10 @@ void testTransformTwice(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 11], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 22], op=INSERT, meta=()}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=LAST, existedColumnName=null}]}",
@@ -452,11 +453,11 @@ void testOneToOneRouting() throws Exception {
"default_namespace.default_schema.routed2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.routed1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.routed2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.routed1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.routed2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -641,10 +642,10 @@ void testMergingWithRoute() throws Exception {
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[2, Bob, 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[2, Bob, 20], after=[2, Bob, 30], op=UPDATE, meta=()}",
@@ -699,11 +700,11 @@ void testRouteWithReplaceSymbol(ValuesDataSink.SinkApi sinkApi) throws Exception
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
- "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
index b57906b046b..0f078701c35 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
@@ -44,7 +44,7 @@ limitations under the License.
org.apache.doris
flink-doris-connector-${flink.major.version}
- 1.6.0
+ 1.6.2
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/factory/DorisDataSinkFactory.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/factory/DorisDataSinkFactory.java
index d0567ab03d9..4891309d414 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/factory/DorisDataSinkFactory.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/factory/DorisDataSinkFactory.java
@@ -39,6 +39,7 @@
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.AUTO_REDIRECT;
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.BENODES;
+import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.CHARSET_ENCODING;
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.FENODES;
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.JDBC_URL;
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.PASSWORD;
@@ -143,6 +144,7 @@ public Set> optionalOptions() {
options.add(JDBC_URL);
options.add(PASSWORD);
options.add(AUTO_REDIRECT);
+ options.add(CHARSET_ENCODING);
options.add(SINK_CHECK_INTERVAL);
options.add(SINK_ENABLE_2PC);
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisDataSinkOptions.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisDataSinkOptions.java
index 62613c059d3..5d58bbc8288 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisDataSinkOptions.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisDataSinkOptions.java
@@ -61,6 +61,12 @@ public class DorisDataSinkOptions {
.withDescription(
"Use automatic redirection of fe without explicitly obtaining the be list");
+ public static final ConfigOption CHARSET_ENCODING =
+ ConfigOptions.key("charset-encoding")
+ .stringType()
+ .defaultValue("UTF-8")
+ .withDescription("Charset encoding for doris http client, default UTF-8.");
+
// Streaming Sink options
public static final ConfigOption SINK_ENABLE_2PC =
ConfigOptions.key("sink.enable-2pc")
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
index 7e107eac17f..e850f8d0c7f 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
@@ -52,6 +52,7 @@
import java.util.List;
import java.util.Map;
+import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.CHARSET_ENCODING;
import static org.apache.flink.cdc.connectors.doris.sink.DorisDataSinkOptions.TABLE_CREATE_PROPERTIES_PREFIX;
/** Supports {@link DorisDataSink} to schema evolution. */
@@ -63,7 +64,8 @@ public class DorisMetadataApplier implements MetadataApplier {
public DorisMetadataApplier(DorisOptions dorisOptions, Configuration config) {
this.dorisOptions = dorisOptions;
- this.schemaChangeManager = new SchemaChangeManager(dorisOptions);
+ this.schemaChangeManager =
+ new SchemaChangeManager(dorisOptions, config.get(CHARSET_ENCODING));
this.config = config;
}
@@ -97,6 +99,7 @@ private void applyCreateTableEvent(CreateTableEvent event)
tableSchema.setDatabase(tableId.getSchemaName());
tableSchema.setFields(buildFields(schema));
tableSchema.setDistributeKeys(buildDistributeKeys(schema));
+ tableSchema.setTableComment(schema.comment());
if (CollectionUtil.isNullOrEmpty(schema.primaryKeys())) {
tableSchema.setModel(DataModel.DUPLICATE);
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
index b1d3e5966fc..00013197b00 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/factory/MySqlDataSourceFactory.java
@@ -35,8 +35,10 @@
import org.apache.flink.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.cdc.connectors.mysql.utils.MySqlSchemaUtils;
import org.apache.flink.cdc.connectors.mysql.utils.OptionUtils;
+import org.apache.flink.cdc.debezium.table.DebeziumOptions;
import org.apache.flink.table.api.ValidationException;
+import io.debezium.relational.RelationalDatabaseConnectorConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -57,6 +59,7 @@
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.CONNECT_TIMEOUT;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.HEARTBEAT_INTERVAL;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.HOSTNAME;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.INCLUDE_COMMENTS_ENABLED;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PASSWORD;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PORT;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SCAN_INCREMENTAL_CLOSE_IDLE_READER_ENABLED;
@@ -118,6 +121,7 @@ public DataSource createDataSource(Context context) {
double distributionFactorLower = config.get(CHUNK_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND);
boolean closeIdleReaders = config.get(SCAN_INCREMENTAL_CLOSE_IDLE_READER_ENABLED);
+ boolean includeComments = config.get(INCLUDE_COMMENTS_ENABLED);
Duration heartbeatInterval = config.get(HEARTBEAT_INTERVAL);
Duration connectTimeout = config.get(CONNECT_TIMEOUT);
@@ -134,6 +138,13 @@ public DataSource createDataSource(Context context) {
Map configMap = config.toMap();
OptionUtils.printOptions(IDENTIFIER, config.toMap());
+ if (includeComments) {
+ // set debezium config 'include.schema.comments' to true
+ configMap.put(
+ DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX
+ + RelationalDatabaseConnectorConfig.INCLUDE_SCHEMA_COMMENTS.name(),
+ "true");
+ }
MySqlSourceConfigFactory configFactory =
new MySqlSourceConfigFactory()
@@ -220,6 +231,7 @@ public Set> optionalOptions() {
options.add(CHUNK_META_GROUP_SIZE);
options.add(CHUNK_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND);
options.add(CHUNK_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND);
+ options.add(INCLUDE_COMMENTS_ENABLED);
return options;
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSource.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSource.java
index d1dc487c04e..63006f8274b 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSource.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSource.java
@@ -29,6 +29,8 @@
import org.apache.flink.cdc.connectors.mysql.source.reader.MySqlPipelineRecordEmitter;
import org.apache.flink.cdc.debezium.table.DebeziumChangelogMode;
+import io.debezium.relational.RelationalDatabaseConnectorConfig;
+
/** A {@link DataSource} for mysql cdc connector. */
@Internal
public class MySqlDataSource implements DataSource {
@@ -43,9 +45,18 @@ public MySqlDataSource(MySqlSourceConfigFactory configFactory) {
@Override
public EventSourceProvider getEventSourceProvider() {
+ boolean includeComments =
+ sourceConfig
+ .getDbzConfiguration()
+ .getBoolean(
+ RelationalDatabaseConnectorConfig.INCLUDE_SCHEMA_COMMENTS.name(),
+ false);
+
MySqlEventDeserializer deserializer =
new MySqlEventDeserializer(
- DebeziumChangelogMode.ALL, sourceConfig.isIncludeSchemaChanges());
+ DebeziumChangelogMode.ALL,
+ sourceConfig.isIncludeSchemaChanges(),
+ includeComments);
MySqlSource source =
new MySqlSource<>(
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
index e852eb3d7a8..bd7bd32f3ec 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
@@ -243,4 +243,13 @@ public class MySqlDataSourceOptions {
+ "If there is a need to use a dot (.) in a regular expression to match any character, "
+ "it is necessary to escape the dot with a backslash."
+ "eg. db0.\\.*, db1.user_table_[0-9]+, db[1-2].[app|web]_order_\\.*");
+
+ @Experimental
+ public static final ConfigOption INCLUDE_COMMENTS_ENABLED =
+ ConfigOptions.key("include-comments.enabled")
+ .booleanType()
+ .defaultValue(false)
+ .withDescription(
+ "Whether enable include table and column comments, by default is false, if set to true, table and column comments will be sent. "
+ + "Note: Enable this option will bring the implications on memory usage.");
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
index 548e603fa5a..67486aebc60 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
@@ -59,21 +59,25 @@ public class MySqlEventDeserializer extends DebeziumEventDeserializationSchema {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private final boolean includeSchemaChanges;
+ private final boolean includeComments;
private transient Tables tables;
private transient CustomMySqlAntlrDdlParser customParser;
public MySqlEventDeserializer(
- DebeziumChangelogMode changelogMode, boolean includeSchemaChanges) {
+ DebeziumChangelogMode changelogMode,
+ boolean includeSchemaChanges,
+ boolean includeComments) {
super(new MySqlSchemaDataTypeInference(), changelogMode);
this.includeSchemaChanges = includeSchemaChanges;
+ this.includeComments = includeComments;
}
@Override
protected List deserializeSchemaChangeRecord(SourceRecord record) {
if (includeSchemaChanges) {
if (customParser == null) {
- customParser = new CustomMySqlAntlrDdlParser();
+ customParser = new CustomMySqlAntlrDdlParser(includeComments);
tables = new Tables();
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/parser/CustomMySqlAntlrDdlParser.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/parser/CustomMySqlAntlrDdlParser.java
index 1264aa8d683..624d1ac416c 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/parser/CustomMySqlAntlrDdlParser.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/parser/CustomMySqlAntlrDdlParser.java
@@ -23,6 +23,7 @@
import io.debezium.antlr.DataTypeResolver;
import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.ddl.parser.mysql.generated.MySqlParser;
+import io.debezium.relational.Tables;
import java.sql.Types;
import java.util.ArrayList;
@@ -35,8 +36,8 @@ public class CustomMySqlAntlrDdlParser extends MySqlAntlrDdlParser {
private final LinkedList parsedEvents;
- public CustomMySqlAntlrDdlParser() {
- super();
+ public CustomMySqlAntlrDdlParser(boolean includeComments) {
+ super(true, false, includeComments, null, Tables.TableFilter.includeAll());
this.parsedEvents = new LinkedList<>();
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/reader/MySqlPipelineRecordEmitter.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/reader/MySqlPipelineRecordEmitter.java
index b7434bf94d5..e6d36241acf 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/reader/MySqlPipelineRecordEmitter.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/reader/MySqlPipelineRecordEmitter.java
@@ -35,6 +35,7 @@
import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.jdbc.JdbcConnection;
import io.debezium.relational.Column;
+import io.debezium.relational.RelationalDatabaseConnectorConfig;
import io.debezium.relational.Table;
import io.debezium.relational.TableId;
import io.debezium.relational.Tables;
@@ -207,6 +208,7 @@ private Schema parseDDL(String ddlStatement, TableId tableId) {
}
tableBuilder.physicalColumn(colName, dataType, column.comment());
}
+ tableBuilder.comment(table.comment());
List primaryKey = table.primaryKeyColumnNames();
if (Objects.nonNull(primaryKey) && !primaryKey.isEmpty()) {
@@ -225,7 +227,16 @@ private synchronized Table parseDdl(String ddlStatement, TableId tableId) {
private synchronized MySqlAntlrDdlParser getParser() {
if (mySqlAntlrDdlParser == null) {
- mySqlAntlrDdlParser = new MySqlAntlrDdlParser();
+ boolean includeComments =
+ sourceConfig
+ .getDbzConfiguration()
+ .getBoolean(
+ RelationalDatabaseConnectorConfig.INCLUDE_SCHEMA_COMMENTS
+ .name(),
+ false);
+ mySqlAntlrDdlParser =
+ new MySqlAntlrDdlParser(
+ true, false, includeComments, null, Tables.TableFilter.includeAll());
}
return mySqlAntlrDdlParser;
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
index 5eb6ce0e598..f10092aa75a 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
@@ -19,6 +19,7 @@
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.cdc.common.configuration.Configuration;
import org.apache.flink.cdc.common.data.binary.BinaryStringData;
import org.apache.flink.cdc.common.event.AddColumnEvent;
import org.apache.flink.cdc.common.event.AlterColumnTypeEvent;
@@ -28,6 +29,8 @@
import org.apache.flink.cdc.common.event.Event;
import org.apache.flink.cdc.common.event.RenameColumnEvent;
import org.apache.flink.cdc.common.event.TableId;
+import org.apache.flink.cdc.common.factories.Factory;
+import org.apache.flink.cdc.common.factories.FactoryHelper;
import org.apache.flink.cdc.common.schema.Column;
import org.apache.flink.cdc.common.schema.Schema;
import org.apache.flink.cdc.common.source.FlinkSourceProvider;
@@ -60,10 +63,19 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.stream.Stream;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.HOSTNAME;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.INCLUDE_COMMENTS_ENABLED;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PASSWORD;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PORT;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SCHEMA_CHANGE_ENABLED;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SERVER_TIME_ZONE;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.TABLES;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.USERNAME;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.TEST_PASSWORD;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.TEST_USER;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.fetchResults;
@@ -350,6 +362,73 @@ public void testParseAlterStatement() throws Exception {
assertThat(actual).isEqualTo(expected);
}
+ @Test
+ public void testIncludeComments() throws Exception {
+ inventoryDatabase.createAndInitialize();
+ List expected = new ArrayList<>();
+ TableId tableId =
+ TableId.tableId(inventoryDatabase.getDatabaseName(), "products_with_comments");
+ try (Connection connection = inventoryDatabase.getJdbcConnection();
+ Statement statement = connection.createStatement()) {
+ String createTableSql =
+ String.format(
+ "CREATE TABLE IF NOT EXISTS `%s`.`%s` (\n"
+ + " id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'column comment of id' PRIMARY KEY,\n"
+ + " name VARCHAR(255) NOT NULL DEFAULT 'flink' COMMENT 'column comment of name',\n"
+ + " weight FLOAT(6) COMMENT 'column comment of weight'\n"
+ + ")\n"
+ + "COMMENT 'table comment of products';",
+ inventoryDatabase.getDatabaseName(), "products_with_comments");
+
+ statement.execute(createTableSql);
+ expected.add(getProductsWithCommentsCreateTableEvent(tableId));
+
+ // add some column
+ statement.execute(
+ String.format(
+ "ALTER TABLE `%s`.`products_with_comments` ADD COLUMN `description` VARCHAR(512) comment 'column comment of description';",
+ inventoryDatabase.getDatabaseName()));
+
+ expected.add(
+ new AddColumnEvent(
+ tableId,
+ Collections.singletonList(
+ new AddColumnEvent.ColumnWithPosition(
+ Column.physicalColumn(
+ "description",
+ DataTypes.VARCHAR(512),
+ "column comment of description")))));
+ }
+
+ Map options = new HashMap<>();
+ options.put(HOSTNAME.key(), MYSQL8_CONTAINER.getHost());
+ options.put(PORT.key(), String.valueOf(MYSQL8_CONTAINER.getDatabasePort()));
+ options.put(USERNAME.key(), TEST_USER);
+ options.put(PASSWORD.key(), TEST_PASSWORD);
+ options.put(SERVER_TIME_ZONE.key(), "UTC");
+ options.put(INCLUDE_COMMENTS_ENABLED.key(), "true");
+ options.put(TABLES.key(), inventoryDatabase.getDatabaseName() + ".products_with_comments");
+ Factory.Context context =
+ new FactoryHelper.DefaultContext(
+ Configuration.fromMap(options), null, this.getClass().getClassLoader());
+
+ MySqlDataSourceFactory factory = new MySqlDataSourceFactory();
+ MySqlDataSource dataSource = (MySqlDataSource) factory.createDataSource(context);
+ FlinkSourceProvider sourceProvider =
+ (FlinkSourceProvider) dataSource.getEventSourceProvider();
+
+ CloseableIterator events =
+ env.fromSource(
+ sourceProvider.getSource(),
+ WatermarkStrategy.noWatermarks(),
+ MySqlDataSourceFactory.IDENTIFIER,
+ new EventTypeInfo())
+ .executeAndCollect();
+
+ List actual = fetchResults(events, expected.size());
+ assertThat(actual).containsExactly(expected.toArray(new Event[0]));
+ }
+
private CreateTableEvent getProductsCreateTableEvent(TableId tableId) {
return new CreateTableEvent(
tableId,
@@ -362,6 +441,19 @@ private CreateTableEvent getProductsCreateTableEvent(TableId tableId) {
.build());
}
+ private CreateTableEvent getProductsWithCommentsCreateTableEvent(TableId tableId) {
+ return new CreateTableEvent(
+ tableId,
+ Schema.newBuilder()
+ .physicalColumn("id", DataTypes.INT().notNull(), "column comment of id")
+ .physicalColumn(
+ "name", DataTypes.VARCHAR(255).notNull(), "column comment of name")
+ .physicalColumn("weight", DataTypes.FLOAT(), "column comment of weight")
+ .primaryKey(Collections.singletonList("id"))
+ .comment("table comment of products")
+ .build());
+ }
+
private List getSnapshotExpected(TableId tableId) {
RowType rowType =
RowType.of(
From 7147bcc32917e346f04ad09d676772a3a600c9ce Mon Sep 17 00:00:00 2001
From: North Lin <240383347@qq.com>
Date: Fri, 19 Jul 2024 18:25:25 +0800
Subject: [PATCH 02/11] [FLINK-34865][pipeline-connector]support sync table and
column comments
update test case
---
.../mysql/source/MySqlPipelineITCase.java | 72 ++++++++++---------
1 file changed, 39 insertions(+), 33 deletions(-)
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
index f10092aa75a..17a6c8d33df 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
@@ -365,40 +365,19 @@ public void testParseAlterStatement() throws Exception {
@Test
public void testIncludeComments() throws Exception {
inventoryDatabase.createAndInitialize();
- List expected = new ArrayList<>();
TableId tableId =
TableId.tableId(inventoryDatabase.getDatabaseName(), "products_with_comments");
- try (Connection connection = inventoryDatabase.getJdbcConnection();
- Statement statement = connection.createStatement()) {
- String createTableSql =
- String.format(
- "CREATE TABLE IF NOT EXISTS `%s`.`%s` (\n"
- + " id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'column comment of id' PRIMARY KEY,\n"
- + " name VARCHAR(255) NOT NULL DEFAULT 'flink' COMMENT 'column comment of name',\n"
- + " weight FLOAT(6) COMMENT 'column comment of weight'\n"
- + ")\n"
- + "COMMENT 'table comment of products';",
- inventoryDatabase.getDatabaseName(), "products_with_comments");
-
- statement.execute(createTableSql);
- expected.add(getProductsWithCommentsCreateTableEvent(tableId));
-
- // add some column
- statement.execute(
- String.format(
- "ALTER TABLE `%s`.`products_with_comments` ADD COLUMN `description` VARCHAR(512) comment 'column comment of description';",
- inventoryDatabase.getDatabaseName()));
- expected.add(
- new AddColumnEvent(
- tableId,
- Collections.singletonList(
- new AddColumnEvent.ColumnWithPosition(
- Column.physicalColumn(
- "description",
- DataTypes.VARCHAR(512),
- "column comment of description")))));
- }
+ String createTableSql =
+ String.format(
+ "CREATE TABLE IF NOT EXISTS `%s`.`%s` (\n"
+ + " id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'column comment of id' PRIMARY KEY,\n"
+ + " name VARCHAR(255) NOT NULL DEFAULT 'flink' COMMENT 'column comment of name',\n"
+ + " weight FLOAT(6) COMMENT 'column comment of weight'\n"
+ + ")\n"
+ + "COMMENT 'table comment of products';",
+ inventoryDatabase.getDatabaseName(), "products_with_comments");
+ executeSql(inventoryDatabase, createTableSql);
Map options = new HashMap<>();
options.put(HOSTNAME.key(), MYSQL8_CONTAINER.getHost());
@@ -425,8 +404,24 @@ public void testIncludeComments() throws Exception {
new EventTypeInfo())
.executeAndCollect();
- List actual = fetchResults(events, expected.size());
- assertThat(actual).containsExactly(expected.toArray(new Event[0]));
+ List actual = fetchResults(events, 1);
+ assertThat(actual.get(0)).isEqualTo(getProductsWithCommentsCreateTableEvent(tableId));
+
+ // add some column
+ String addColumnSql =
+ String.format(
+ "ALTER TABLE `%s`.`products_with_comments` ADD COLUMN `description` VARCHAR(512) comment 'column comment of description';",
+ inventoryDatabase.getDatabaseName());
+ executeSql(inventoryDatabase, addColumnSql);
+ actual = fetchResults(events, 1);
+ assertThat(actual.get(0)).isEqualTo(getProductsWithCommentsAddColumnEvent(tableId));
+ }
+
+ private void executeSql(UniqueDatabase database, String sql) throws SQLException {
+ try (Connection connection = database.getJdbcConnection();
+ Statement statement = connection.createStatement()) {
+ statement.execute(sql);
+ }
}
private CreateTableEvent getProductsCreateTableEvent(TableId tableId) {
@@ -454,6 +449,17 @@ private CreateTableEvent getProductsWithCommentsCreateTableEvent(TableId tableId
.build());
}
+ private AddColumnEvent getProductsWithCommentsAddColumnEvent(TableId tableId) {
+ return new AddColumnEvent(
+ tableId,
+ Collections.singletonList(
+ new AddColumnEvent.ColumnWithPosition(
+ Column.physicalColumn(
+ "description",
+ DataTypes.VARCHAR(512),
+ "column comment of description"))));
+ }
+
private List getSnapshotExpected(TableId tableId) {
RowType rowType =
RowType.of(
From a27d2e522d93d10106deb97a23362e7e1298de1d Mon Sep 17 00:00:00 2001
From: "north.lin"
Date: Thu, 8 Aug 2024 17:40:44 +0800
Subject: [PATCH 03/11] [FLINK-34865][pipeline-connector]support sync table and
column comments
add docs for `charset-encoding` param
---
.../docs/connectors/pipeline-connectors/doris.md | 7 +++++++
docs/content/docs/connectors/pipeline-connectors/doris.md | 7 +++++++
2 files changed, 14 insertions(+)
diff --git a/docs/content.zh/docs/connectors/pipeline-connectors/doris.md b/docs/content.zh/docs/connectors/pipeline-connectors/doris.md
index bdbb5fe2c83..0f3bcd0fe88 100644
--- a/docs/content.zh/docs/connectors/pipeline-connectors/doris.md
+++ b/docs/content.zh/docs/connectors/pipeline-connectors/doris.md
@@ -119,6 +119,13 @@ pipeline:
String |
是否通过FE重定向写入,直连BE写入 |
+
+ | charset-encoding |
+ optional |
+ UTF-8 |
+ String |
+ Doris Http客户端字符集编码,默认UTF-8 |
+
| sink.enable.batch-mode |
optional |
diff --git a/docs/content/docs/connectors/pipeline-connectors/doris.md b/docs/content/docs/connectors/pipeline-connectors/doris.md
index b5048eaf919..6ecb2515b34 100644
--- a/docs/content/docs/connectors/pipeline-connectors/doris.md
+++ b/docs/content/docs/connectors/pipeline-connectors/doris.md
@@ -119,6 +119,13 @@ pipeline:
String |
Whether to write through FE redirection and directly connect to BE to write |
+
+ | charset-encoding |
+ optional |
+ UTF-8 |
+ String |
+ Charset encoding for doris http client, default UTF-8 |
+
| sink.enable.batch-mode |
optional |
From 11f8062740026fd2edce25bca3108c521300ae06 Mon Sep 17 00:00:00 2001
From: "north.lin"
Date: Fri, 9 Aug 2024 16:34:26 +0800
Subject: [PATCH 04/11] update test cases and support paimon table comment
---
.../composer/flink/FlinkPipelineComposerITCase.java | 10 +++++-----
.../connectors/mysql/source/MySqlPipelineITCase.java | 5 ++++-
.../connectors/paimon/sink/PaimonMetadataApplier.java | 1 +
.../values/sink/ValuesDataSinkHelperTest.java | 2 +-
4 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
index 0830b52361a..d62408f8908 100644
--- a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
+++ b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
@@ -522,11 +522,11 @@ void testIdenticalOneToOneRouting() throws Exception {
"default_namespace.default_schema.table2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -911,10 +911,10 @@ void testTransformMergingWithRoute() throws Exception {
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18, last_name], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[2, Bob, 20, last_name], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[2, Bob, 20, last_name], after=[2, Bob, 30, last_name], op=UPDATE, meta=()}",
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
index afe048ed8b3..97b2f48e54b 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
@@ -442,7 +442,10 @@ private CreateTableEvent getProductsWithCommentsCreateTableEvent(TableId tableId
Schema.newBuilder()
.physicalColumn("id", DataTypes.INT().notNull(), "column comment of id")
.physicalColumn(
- "name", DataTypes.VARCHAR(255).notNull(), "column comment of name")
+ "name",
+ DataTypes.VARCHAR(255).notNull(),
+ "column comment of name",
+ "flink")
.physicalColumn("weight", DataTypes.FLOAT(), "column comment of weight")
.primaryKey(Collections.singletonList("id"))
.comment("table comment of products")
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
index 3ae3c9c4c73..8b1e4b0fbfa 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
@@ -159,6 +159,7 @@ private void applyCreateTable(CreateTableEvent event)
} else if (schema.partitionKeys() != null && !schema.partitionKeys().isEmpty()) {
builder.partitionKeys(schema.partitionKeys());
}
+ builder.comment(schema.comment());
builder.options(tableOptions);
builder.options(schema.options());
catalog.createTable(
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
index 238f0756456..4493453dee3 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
@@ -50,7 +50,7 @@ public void testConvertEventToStr() {
List fieldGetters = SchemaUtils.createFieldGetters(schema);
Assert.assertEquals(
- "CreateTableEvent{tableId=default.default.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default.default.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
ValuesDataSinkHelper.convertEventToStr(
new CreateTableEvent(tableId, schema), fieldGetters));
From 4fdf2a9919ab99a7eb467cf083c8d481c1de2831 Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Sun, 12 Jan 2025 00:21:30 +0800
Subject: [PATCH 05/11] format code & update case
---
.../flink/FlinkPipelineComposerITCase.java | 22 ++--
.../pom.xml | 1 -
.../mysql/source/MySqlDataSourceOptions.java | 2 +-
.../mysql/source/MySqlEventDeserializer.java | 2 +-
.../mysql/source/MySqlPipelineITCase.java | 101 +++++++++++++++++-
.../paimon/sink/PaimonMetadataApplier.java | 1 +
6 files changed, 114 insertions(+), 15 deletions(-)
diff --git a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
index b2da81e5e8a..ae1c2a3b5d4 100644
--- a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
+++ b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
@@ -410,10 +410,10 @@ void testOpTypeMetadataColumn(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING,`rk` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING,`rk` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 10, +I, 1], op=INSERT, meta=({op_ts=1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 20, +I, 2], op=INSERT, meta=({op_ts=2})}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=AFTER, existedColumnName=col2}]}",
@@ -1250,10 +1250,10 @@ void testTransformMergingWithRouteChangeOrder(ValuesDataSink.SinkApi sinkApi) th
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, comment=null, options=()}",
"AddColumnEvent{tableId=default_namespace.default_schema.merged, addedColumns=[ColumnWithPosition{column=`description` STRING, position=AFTER, existedColumnName=last_name}]}",
"AlterColumnTypeEvent{tableId=default_namespace.default_schema.merged, typeMapping={id=BIGINT NOT NULL}, oldTypeMapping={id=INT NOT NULL}}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18, last_name, null], op=INSERT, meta=()}",
@@ -1392,12 +1392,12 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
String[] expected =
Stream.of(
// Merging timestamp with different precision
- "CreateTableEvent{tableId={}_table_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId={}_table_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[1, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_timestamp_merged, typeMapping={birthday=TIMESTAMP(9)}, oldTypeMapping={birthday=TIMESTAMP(0)}}",
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[2, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1405,7 +1405,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[102, Zen, 19, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
// Merging zoned timestamp with different precision
- "CreateTableEvent{tableId={}_table_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0) WITH TIME ZONE}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId={}_table_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0) WITH TIME ZONE}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[3, Alice, 17, 2020-01-01T14:28:57Z], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_zoned_timestamp_merged, typeMapping={birthday=TIMESTAMP(9) WITH TIME ZONE}, oldTypeMapping={birthday=TIMESTAMP(0) WITH TIME ZONE}}",
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[4, Alice, 17, 2020-01-01T14:28:57.123456789Z], op=INSERT, meta=()}",
@@ -1413,7 +1413,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[104, Zen, 19, 2020-01-01T14:28:57.123456789Z], op=INSERT, meta=()}",
// Merging local-zoned timestamp with different precision
- "CreateTableEvent{tableId={}_table_local_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP_LTZ(0)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId={}_table_local_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP_LTZ(0)}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[5, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_local_zoned_timestamp_merged, typeMapping={birthday=TIMESTAMP_LTZ(9)}, oldTypeMapping={birthday=TIMESTAMP_LTZ(0)}}",
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[6, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1421,7 +1421,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[106, Zen, 19, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
// Merging all
- "CreateTableEvent{tableId={}_everything_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId={}_everything_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId={}_everything_merged, before=[], after=[1, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_everything_merged, typeMapping={birthday=TIMESTAMP(9)}, oldTypeMapping={birthday=TIMESTAMP(0)}}",
"DataChangeEvent{tableId={}_everything_merged, before=[], after=[2, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1489,11 +1489,11 @@ void testMergingDecimalWithVariousPrecisions(ValuesDataSink.SinkApi sinkApi) thr
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split("\n");
+ String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
String[] expected =
Stream.of(
- "CreateTableEvent{tableId={}, schema=columns={`id` INT,`name` STRING,`age` INT,`fav_num` TINYINT}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId={}, schema=columns={`id` INT,`name` STRING,`age` INT,`fav_num` TINYINT}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId={}, before=[], after=[1, Alice, 17, 1], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}, typeMapping={fav_num=SMALLINT}, oldTypeMapping={fav_num=TINYINT}}",
"DataChangeEvent{tableId={}, before=[], after=[2, Alice, 17, 22], op=INSERT, meta=()}",
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
index 2015700d085..3257debfe15 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/pom.xml
@@ -28,7 +28,6 @@ limitations under the License.
24.0.1
- 1.18.3
8.0.26
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
index 5feef1b6d00..0b912f64c59 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
@@ -281,7 +281,7 @@ public class MySqlDataSourceOptions {
.withDescription(
"List of readable metadata from SourceRecord to be passed to downstream, split by `,`. "
+ "Available readable metadata are: op_ts.");
-
+
@Experimental
public static final ConfigOption INCLUDE_COMMENTS_ENABLED =
ConfigOptions.key("include-comments.enabled")
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
index fad8ae970b1..004fc5e1abe 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlEventDeserializer.java
@@ -68,7 +68,7 @@ public class MySqlEventDeserializer extends DebeziumEventDeserializationSchema {
private transient CustomMySqlAntlrDdlParser customParser;
private List readableMetadataList;
-
+
public MySqlEventDeserializer(
DebeziumChangelogMode changelogMode, boolean includeSchemaChanges) {
this(changelogMode, includeSchemaChanges, new ArrayList<>(), false);
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
index 166b724aeca..1252733f4ab 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlPipelineITCase.java
@@ -75,7 +75,14 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.HOSTNAME;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.INCLUDE_COMMENTS_ENABLED;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PASSWORD;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.PORT;
import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SCHEMA_CHANGE_ENABLED;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.SERVER_TIME_ZONE;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.TABLES;
+import static org.apache.flink.cdc.connectors.mysql.source.MySqlDataSourceOptions.USERNAME;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.TEST_PASSWORD;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.TEST_USER;
import static org.apache.flink.cdc.connectors.mysql.testutils.MySqSourceTestUtils.fetchResults;
@@ -931,6 +938,98 @@ public void testDanglingDropTableEventInBinlog() throws Exception {
actual.stream().map(Object::toString).collect(Collectors.toList()));
}
+ @Test
+ public void testIncludeComments() throws Exception {
+ env.setParallelism(1);
+ inventoryDatabase.createAndInitialize();
+ TableId tableId =
+ TableId.tableId(inventoryDatabase.getDatabaseName(), "products_with_comments");
+
+ String createTableSql =
+ String.format(
+ "CREATE TABLE IF NOT EXISTS `%s`.`%s` (\n"
+ + " id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'column comment of id' PRIMARY KEY,\n"
+ + " name VARCHAR(255) NOT NULL DEFAULT 'flink' COMMENT 'column comment of name',\n"
+ + " weight FLOAT(6) COMMENT 'column comment of weight'\n"
+ + ")\n"
+ + "COMMENT 'table comment of products';",
+ inventoryDatabase.getDatabaseName(), "products_with_comments");
+ executeSql(inventoryDatabase, createTableSql);
+
+ Map options = new HashMap<>();
+ options.put(HOSTNAME.key(), MYSQL8_CONTAINER.getHost());
+ options.put(PORT.key(), String.valueOf(MYSQL8_CONTAINER.getDatabasePort()));
+ options.put(USERNAME.key(), TEST_USER);
+ options.put(PASSWORD.key(), TEST_PASSWORD);
+ options.put(SERVER_TIME_ZONE.key(), "UTC");
+ options.put(INCLUDE_COMMENTS_ENABLED.key(), "true");
+ options.put(TABLES.key(), inventoryDatabase.getDatabaseName() + ".products_with_comments");
+ Factory.Context context =
+ new FactoryHelper.DefaultContext(
+ Configuration.fromMap(options), null, this.getClass().getClassLoader());
+
+ MySqlDataSourceFactory factory = new MySqlDataSourceFactory();
+ MySqlDataSource dataSource = (MySqlDataSource) factory.createDataSource(context);
+ FlinkSourceProvider sourceProvider =
+ (FlinkSourceProvider) dataSource.getEventSourceProvider();
+
+ CloseableIterator events =
+ env.fromSource(
+ sourceProvider.getSource(),
+ WatermarkStrategy.noWatermarks(),
+ MySqlDataSourceFactory.IDENTIFIER,
+ new EventTypeInfo())
+ .executeAndCollect();
+ Thread.sleep(5_000);
+
+ // add some column
+ String addColumnSql =
+ String.format(
+ "ALTER TABLE `%s`.`products_with_comments` ADD COLUMN `description` VARCHAR(512) comment 'column comment of description';",
+ inventoryDatabase.getDatabaseName());
+ executeSql(inventoryDatabase, addColumnSql);
+
+ List expectedEvents = getEventsWithComments(tableId);
+ List actual = fetchResults(events, expectedEvents.size());
+ assertEqualsInAnyOrder(
+ expectedEvents.stream().map(Object::toString).collect(Collectors.toList()),
+ actual.stream().map(Object::toString).collect(Collectors.toList()));
+ }
+
+ private void executeSql(UniqueDatabase database, String sql) throws SQLException {
+ try (Connection connection = database.getJdbcConnection();
+ Statement statement = connection.createStatement()) {
+ statement.execute(sql);
+ }
+ }
+
+ private List getEventsWithComments(TableId tableId) {
+ return Arrays.asList(
+ new CreateTableEvent(
+ tableId,
+ Schema.newBuilder()
+ .physicalColumn(
+ "id", DataTypes.INT().notNull(), "column comment of id")
+ .physicalColumn(
+ "name",
+ DataTypes.VARCHAR(255).notNull(),
+ "column comment of name",
+ "flink")
+ .physicalColumn(
+ "weight", DataTypes.FLOAT(), "column comment of weight")
+ .primaryKey(Collections.singletonList("id"))
+ .comment("table comment of products")
+ .build()),
+ new AddColumnEvent(
+ tableId,
+ Collections.singletonList(
+ new AddColumnEvent.ColumnWithPosition(
+ Column.physicalColumn(
+ "description",
+ DataTypes.VARCHAR(512),
+ "column comment of description")))));
+ }
+
private CreateTableEvent getProductsCreateTableEvent(TableId tableId) {
return new CreateTableEvent(
tableId,
@@ -1196,4 +1295,4 @@ private List executeAlterAndProvideExpected(TableId tableId, Statement st
inventoryDatabase.getDatabaseName()));
return expected;
}
-}
\ No newline at end of file
+}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
index 928e7b6dfcb..2170f631dec 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplier.java
@@ -178,6 +178,7 @@ private void applyCreateTable(CreateTableEvent event) throws SchemaEvolveExcepti
}
builder.partitionKeys(partitionKeys)
.primaryKey(primaryKeys)
+ .comment(schema.comment())
.options(tableOptions)
.options(schema.options());
catalog.createTable(
From a5e3b9659b8919bf4efa70ce4288cc573e7cf48b Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Sun, 12 Jan 2025 15:06:17 +0800
Subject: [PATCH 06/11] update case
---
.../MySqlParallelizedPipelineITCase.java | 2 +-
.../cdc/pipeline/tests/MysqlE2eITCase.java | 16 ++++----
.../cdc/pipeline/tests/RouteE2eITCase.java | 40 +++++++++----------
.../pipeline/tests/SchemaEvolveE2eITCase.java | 2 +-
.../SchemaEvolvingTransformE2eITCase.java | 2 +-
.../pipeline/tests/TransformE2eITCase.java | 32 +++++++--------
.../cdc/pipeline/tests/UdfE2eITCase.java | 8 ++--
7 files changed, 51 insertions(+), 51 deletions(-)
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
index 4bd9806a580..b31e4301812 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
@@ -189,7 +189,7 @@ public void testExtremeParallelizedSchemaChange() throws Exception {
.map(
subTaskId ->
String.format(
- "%d> CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "%d> CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
subTaskId,
parallelismDatabase
.getDatabaseName(),
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
index 976522b601a..d838ce3b783 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
@@ -126,12 +126,12 @@ public void testSyncWholeDatabase() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null], op=INSERT, meta=()}",
@@ -235,12 +235,12 @@ public void testSchemaChangeEvents() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null], op=INSERT, meta=()}",
@@ -381,12 +381,12 @@ public void testSoftDelete() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512),`op_type` STRING NOT NULL}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512),`op_type` STRING NOT NULL}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING,`op_type` STRING NOT NULL}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING,`op_type` STRING NOT NULL}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null, +I], op=INSERT, meta=()}",
@@ -546,8 +546,8 @@ public void testDanglingDropTableEventInBinlog() throws Exception {
submitPipelineJob(pipelineJob, mysqlCdcJar, valuesCdcJar, mysqlDriverJar);
waitUntilJobRunning(Duration.ofSeconds(30));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}");
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}");
}
private void validateResult(String... expectedEvents) throws Exception {
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
index cd342ef460d..76d8d46f6c9 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
@@ -173,19 +173,19 @@ public void testDefaultRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -270,7 +270,7 @@ public void testMergeTableRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -354,17 +354,17 @@ public void testPartialRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -454,17 +454,17 @@ public void testMultipleRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.BETAGAMM, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.BETAGAMM, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -564,17 +564,17 @@ public void testOneToManyRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEB, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEB, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEC, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEC, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -674,7 +674,7 @@ public void testMergeTableRouteWithTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`EXTRAS` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`EXTRAS` STRING}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -760,19 +760,19 @@ public void testReplacementSymbol() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -880,7 +880,7 @@ public void testExtremeMergeTableRoute() throws Exception {
i ->
String.format(
prefix
- + "CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ + "CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
databaseName,
i))
.toArray(String[]::new));
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
index 98e4deba9a5..20e902d624d 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
@@ -577,7 +577,7 @@ private void testGenericSchemaEvolution(
private void validateSnapshotData(String dbName, String tableName) throws Exception {
List expected =
Stream.of(
- "CreateTableEvent{tableId=%s.%s, schema=columns={`id` INT NOT NULL,`name` VARCHAR(17),`age` INT}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.%s, schema=columns={`id` INT NOT NULL,`name` VARCHAR(17),`age` INT}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1008, Alice, 21], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1009, Bob, 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1010, Carol, 19], op=INSERT, meta=()}",
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
index 7523387cf70..2ff72be7cea 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
@@ -389,7 +389,7 @@ private void testGenericSchemaEvolution(
private void validateSnapshotData(String dbName, String tableName) throws Exception {
List expected =
Stream.of(
- "CreateTableEvent{tableId=%s.%s, schema=columns={`uid` STRING,`id` INT NOT NULL,`name` VARCHAR(17),`age` INT,`id_square` INT,`tag` STRING}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.%s, schema=columns={`uid` STRING,`id` INT NOT NULL,`name` VARCHAR(17),`age` INT,`id_square` INT,`tag` STRING}, primaryKeys=id, comment=null, options=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1009 -> Bob, 1009, Bob, 20, -1018081, age >= 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1008 -> Alice, 1008, Alice, 21, -1016064, age >= 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1011 -> Derrida, 1011, Derrida, 18, 1022121, age < 20], op=INSERT, meta=()}",
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
index 8b292ff850a..37c878b5870 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
@@ -138,7 +138,7 @@ public void testHeteroSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -219,13 +219,13 @@ public void testMultipleTransformRule() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -309,7 +309,7 @@ public void testAssortedSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`NAME` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -386,13 +386,13 @@ public void testWildcardSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -469,13 +469,13 @@ public void testWildcardWithMetadataColumnTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -553,13 +553,13 @@ public void testMultipleHittingTable() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -639,7 +639,7 @@ public void testMultipleTransformWithDiffRefColumn() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`ROLENAME` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`ROLENAME` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -714,12 +714,12 @@ public void testTransformWithCast() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -833,7 +833,7 @@ public void testTransformWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`UID` STRING,`PRICE` INT}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`UID` STRING,`PRICE` INT}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -932,7 +932,7 @@ public void testTransformWildcardPrefixWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` STRING}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -1039,7 +1039,7 @@ public void testTransformWildcardSuffixWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`UID` STRING,`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128)}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`UID` STRING,`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128)}, primaryKeys=ID, comment=null, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
index 9f5b98298c1..8917964a5be 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
@@ -179,7 +179,7 @@ private void testUserDefinedFunctions(String language) throws Exception {
List expectedEvents =
Arrays.asList(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, comment=null, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1009, 8.1, 1011, <8.1>], op=INSERT, meta=()}",
@@ -191,7 +191,7 @@ private void testUserDefinedFunctions(String language) throws Exception {
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1011, 11, 1013, <11>], op=INSERT, meta=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`ANS` STRING,`TYP` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`ANS` STRING,`TYP` STRING}, primaryKeys=ID, comment=null, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEBETA, before=[], after=[2011, 11, Forty-two, Integer: 2011], op=INSERT, meta=()}",
@@ -311,7 +311,7 @@ private void testFlinkCompatibleScalarFunctions(String language) throws Exceptio
List expectedEvents =
Arrays.asList(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, comment=null, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1009, 8.1, 1011, <8.1>], op=INSERT, meta=()}",
@@ -323,7 +323,7 @@ private void testFlinkCompatibleScalarFunctions(String language) throws Exceptio
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1011, 11, 1013, <11>], op=INSERT, meta=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`TYP` STRING}, primaryKeys=ID, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`TYP` STRING}, primaryKeys=ID, comment=null, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEBETA, before=[], after=[2011, 11, Integer: 2011], op=INSERT, meta=()}",
From 2eda3dd87e1c44b732b631f195cba2817052ccc4 Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Mon, 13 Jan 2025 12:59:29 +0800
Subject: [PATCH 07/11] revert case
---
.../flink/cdc/common/schema/Schema.java | 6 +-
.../flink/FlinkPipelineComposerITCase.java | 67 +++++++++----------
.../MySqlParallelizedPipelineITCase.java | 2 +-
.../values/sink/ValuesDataSinkHelperTest.java | 2 +-
.../cdc/pipeline/tests/MysqlE2eITCase.java | 16 ++---
.../cdc/pipeline/tests/RouteE2eITCase.java | 40 +++++------
.../pipeline/tests/SchemaEvolveE2eITCase.java | 2 +-
.../SchemaEvolvingTransformE2eITCase.java | 2 +-
.../pipeline/tests/TransformE2eITCase.java | 32 ++++-----
.../cdc/pipeline/tests/UdfE2eITCase.java | 8 +--
10 files changed, 90 insertions(+), 87 deletions(-)
diff --git a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
index d48e0d06622..2252ed621a1 100644
--- a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
+++ b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
@@ -25,6 +25,8 @@
import org.apache.flink.cdc.common.types.RowType;
import org.apache.flink.cdc.common.utils.Preconditions;
+import org.apache.commons.lang3.StringUtils;
+
import javax.annotation.Nullable;
import java.io.Serializable;
@@ -239,7 +241,9 @@ public String toString() {
if (!partitionKeys.isEmpty()) {
sb.append(", partitionKeys=").append(String.join(";", partitionKeys));
}
- sb.append(", comment=").append(comment);
+ if (StringUtils.isNotBlank(comment)) {
+ sb.append(", comment=").append(comment);
+ }
sb.append(", options=").append(describeOptions());
return sb.toString();
diff --git a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
index ae1c2a3b5d4..55a205d495a 100644
--- a/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
+++ b/flink-cdc-composer/src/test/java/org/apache/flink/cdc/composer/flink/FlinkPipelineComposerITCase.java
@@ -82,7 +82,6 @@
class FlinkPipelineComposerITCase {
private static final int MAX_PARALLELISM = 4;
- private static final String LINE_SEPARATOR = System.lineSeparator();
// Always use parent-first classloader for CDC classes.
// The reason is that ValuesDatabase uses static field for holding data, we need to make sure
@@ -170,10 +169,10 @@ void testSingleSplitSingleTable(ValuesDataSink.SinkApi sinkApi) throws Exception
"default_namespace.default_schema.table1:col1=3;newCol3=");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -235,11 +234,11 @@ void testSingleSplitMultipleTables(ValuesDataSink.SinkApi sinkApi) throws Except
"default_namespace.default_schema.table2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -347,10 +346,10 @@ void testTransform(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 10], op=INSERT, meta=({op_ts=1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 20], op=INSERT, meta=({op_ts=2})}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=AFTER, existedColumnName=col2}]}",
@@ -410,10 +409,10 @@ void testOpTypeMetadataColumn(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING,`rk` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING,`rk` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 10, +I, 1], op=INSERT, meta=({op_ts=1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 20, +I, 2], op=INSERT, meta=({op_ts=2})}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=AFTER, existedColumnName=col2}]}",
@@ -482,10 +481,10 @@ void testTransformTwice(ValuesDataSink.SinkApi sinkApi) throws Exception {
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, comment=null, options=({key1=value1})}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING NOT NULL,`col2` STRING,`col12` STRING}, primaryKeys=col1, partitionKeys=col12, options=({key1=value1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1, 11], op=INSERT, meta=({op_ts=1})}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2, 22], op=INSERT, meta=({op_ts=2})}",
"AddColumnEvent{tableId=default_namespace.default_schema.table1, addedColumns=[ColumnWithPosition{column=`col3` STRING, position=AFTER, existedColumnName=col2}]}",
@@ -554,11 +553,11 @@ void testOneToOneRouting(ValuesDataSink.SinkApi sinkApi) throws Exception {
"default_namespace.default_schema.routed2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.routed1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.routed2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.routed1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.routed2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.routed1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -631,11 +630,11 @@ void testIdenticalOneToOneRouting(ValuesDataSink.SinkApi sinkApi) throws Excepti
"default_namespace.default_schema.table2:col1=3;col2=3");
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
- "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -829,10 +828,10 @@ void testMergingWithRoute(ValuesDataSink.SinkApi sinkApi) throws Exception {
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT,`name` STRING,`age` INT}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[2, Bob, 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[2, Bob, 20], after=[2, Bob, 30], op=UPDATE, meta=()}",
@@ -1040,10 +1039,10 @@ void testTransformMergingWithRoute(ValuesDataSink.SinkApi sinkApi) throws Except
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18, last_name], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[2, Bob, 20, last_name], op=INSERT, meta=()}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[2, Bob, 20, last_name], after=[2, Bob, 30, last_name], op=UPDATE, meta=()}",
@@ -1250,10 +1249,10 @@ void testTransformMergingWithRouteChangeOrder(ValuesDataSink.SinkApi sinkApi) th
.physicalColumn("gender", DataTypes.STRING())
.primaryKey("id")
.build());
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=default_namespace.default_schema.merged, schema=columns={`id` INT NOT NULL,`name` STRING,`age` INT,`last_name` STRING}, primaryKeys=id, options=()}",
"AddColumnEvent{tableId=default_namespace.default_schema.merged, addedColumns=[ColumnWithPosition{column=`description` STRING, position=AFTER, existedColumnName=last_name}]}",
"AlterColumnTypeEvent{tableId=default_namespace.default_schema.merged, typeMapping={id=BIGINT NOT NULL}, oldTypeMapping={id=INT NOT NULL}}",
"DataChangeEvent{tableId=default_namespace.default_schema.merged, before=[], after=[1, Alice, 18, last_name, null], op=INSERT, meta=()}",
@@ -1310,11 +1309,11 @@ void testRouteWithReplaceSymbol(ValuesDataSink.SinkApi sinkApi) throws Exception
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
assertThat(outputEvents)
.containsExactly(
- "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
- "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
+ "CreateTableEvent{tableId=replaced_namespace.replaced_schema.table2, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[1, 1], op=INSERT, meta=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[2, 2], op=INSERT, meta=()}",
"DataChangeEvent{tableId=replaced_namespace.replaced_schema.table1, before=[], after=[3, 3], op=INSERT, meta=()}",
@@ -1392,12 +1391,12 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
String[] expected =
Stream.of(
// Merging timestamp with different precision
- "CreateTableEvent{tableId={}_table_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId={}_table_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[1, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_timestamp_merged, typeMapping={birthday=TIMESTAMP(9)}, oldTypeMapping={birthday=TIMESTAMP(0)}}",
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[2, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1405,7 +1404,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_timestamp_merged, before=[], after=[102, Zen, 19, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
// Merging zoned timestamp with different precision
- "CreateTableEvent{tableId={}_table_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0) WITH TIME ZONE}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId={}_table_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0) WITH TIME ZONE}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[3, Alice, 17, 2020-01-01T14:28:57Z], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_zoned_timestamp_merged, typeMapping={birthday=TIMESTAMP(9) WITH TIME ZONE}, oldTypeMapping={birthday=TIMESTAMP(0) WITH TIME ZONE}}",
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[4, Alice, 17, 2020-01-01T14:28:57.123456789Z], op=INSERT, meta=()}",
@@ -1413,7 +1412,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_zoned_timestamp_merged, before=[], after=[104, Zen, 19, 2020-01-01T14:28:57.123456789Z], op=INSERT, meta=()}",
// Merging local-zoned timestamp with different precision
- "CreateTableEvent{tableId={}_table_local_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP_LTZ(0)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId={}_table_local_zoned_timestamp_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP_LTZ(0)}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[5, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_table_local_zoned_timestamp_merged, typeMapping={birthday=TIMESTAMP_LTZ(9)}, oldTypeMapping={birthday=TIMESTAMP_LTZ(0)}}",
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[6, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1421,7 +1420,7 @@ void testMergingTemporalTypesWithPromotedPrecisions(ValuesDataSink.SinkApi sinkA
"DataChangeEvent{tableId={}_table_local_zoned_timestamp_merged, before=[], after=[106, Zen, 19, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
// Merging all
- "CreateTableEvent{tableId={}_everything_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId={}_everything_merged, schema=columns={`id` INT,`name` STRING,`age` INT,`birthday` TIMESTAMP(0)}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId={}_everything_merged, before=[], after=[1, Alice, 17, 2020-01-01T14:28:57], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}_everything_merged, typeMapping={birthday=TIMESTAMP(9)}, oldTypeMapping={birthday=TIMESTAMP(0)}}",
"DataChangeEvent{tableId={}_everything_merged, before=[], after=[2, Alice, 17, 2020-01-01T14:28:57.123456789], op=INSERT, meta=()}",
@@ -1489,11 +1488,11 @@ void testMergingDecimalWithVariousPrecisions(ValuesDataSink.SinkApi sinkApi) thr
execution.execute();
// Check the order and content of all received events
- String[] outputEvents = outCaptor.toString().trim().split(LINE_SEPARATOR);
+ String[] outputEvents = outCaptor.toString().trim().split("\n");
String[] expected =
Stream.of(
- "CreateTableEvent{tableId={}, schema=columns={`id` INT,`name` STRING,`age` INT,`fav_num` TINYINT}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId={}, schema=columns={`id` INT,`name` STRING,`age` INT,`fav_num` TINYINT}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId={}, before=[], after=[1, Alice, 17, 1], op=INSERT, meta=()}",
"AlterColumnTypeEvent{tableId={}, typeMapping={fav_num=SMALLINT}, oldTypeMapping={fav_num=TINYINT}}",
"DataChangeEvent{tableId={}, before=[], after=[2, Alice, 17, 22], op=INSERT, meta=()}",
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
index b31e4301812..4bd9806a580 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/test/java/org/apache/flink/cdc/connectors/mysql/source/MySqlParallelizedPipelineITCase.java
@@ -189,7 +189,7 @@ public void testExtremeParallelizedSchemaChange() throws Exception {
.map(
subTaskId ->
String.format(
- "%d> CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "%d> CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
subTaskId,
parallelismDatabase
.getDatabaseName(),
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
index 4493453dee3..238f0756456 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values/src/test/java/org/apache/flink/cdc/connectors/values/sink/ValuesDataSinkHelperTest.java
@@ -50,7 +50,7 @@ public void testConvertEventToStr() {
List fieldGetters = SchemaUtils.createFieldGetters(schema);
Assert.assertEquals(
- "CreateTableEvent{tableId=default.default.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, comment=null, options=()}",
+ "CreateTableEvent{tableId=default.default.table1, schema=columns={`col1` STRING,`col2` STRING}, primaryKeys=col1, options=()}",
ValuesDataSinkHelper.convertEventToStr(
new CreateTableEvent(tableId, schema), fieldGetters));
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
index d838ce3b783..976522b601a 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MysqlE2eITCase.java
@@ -126,12 +126,12 @@ public void testSyncWholeDatabase() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null], op=INSERT, meta=()}",
@@ -235,12 +235,12 @@ public void testSchemaChangeEvents() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null], op=INSERT, meta=()}",
@@ -381,12 +381,12 @@ public void testSoftDelete() throws Exception {
mysqlInventoryDatabase.getDatabaseName()));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512),`op_type` STRING NOT NULL}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512),`op_type` STRING NOT NULL}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[104, user_4, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[103, user_3, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[102, user_2, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.customers, before=[], after=[101, user_1, Shanghai, 123567891234, +I], op=INSERT, meta=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING,`op_type` STRING NOT NULL}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING,`op_type` STRING NOT NULL}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[109, spare tire, 24 inch spare tire, 22.2, null, null, null, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[107, rocks, box of assorted rocks, 5.3, null, null, null, +I], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.products, before=[], after=[108, jacket, water resistent black wind breaker, 0.1, null, null, null, +I], op=INSERT, meta=()}",
@@ -546,8 +546,8 @@ public void testDanglingDropTableEventInBinlog() throws Exception {
submitPipelineJob(pipelineJob, mysqlCdcJar, valuesCdcJar, mysqlDriverJar);
waitUntilJobRunning(Duration.ofSeconds(30));
validateResult(
- "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, comment=null, options=()}",
- "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, comment=null, options=()}");
+ "CreateTableEvent{tableId=%s.customers, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`address` VARCHAR(1024),`phone_number` VARCHAR(512)}, primaryKeys=id, options=()}",
+ "CreateTableEvent{tableId=%s.products, schema=columns={`id` INT NOT NULL,`name` VARCHAR(255) NOT NULL 'flink',`description` VARCHAR(512),`weight` FLOAT,`enum_c` STRING 'red',`json_c` STRING,`point_c` STRING}, primaryKeys=id, options=()}");
}
private void validateResult(String... expectedEvents) throws Exception {
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
index 76d8d46f6c9..cd342ef460d 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/RouteE2eITCase.java
@@ -173,19 +173,19 @@ public void testDefaultRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -270,7 +270,7 @@ public void testMergeTableRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -354,17 +354,17 @@ public void testPartialRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -454,17 +454,17 @@ public void testMultipleRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.ALPHABET, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.BETAGAMM, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.BETAGAMM, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -564,17 +564,17 @@ public void testOneToManyRoute() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEB, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEB, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.TABLEC, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.TABLEC, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -674,7 +674,7 @@ public void testMergeTableRouteWithTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`EXTRAS` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.ALL, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`EXTRAS` STRING}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -760,19 +760,19 @@ public void testReplacementSymbol() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEGAMMA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=NEW_%s.NEW_TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=NEW_%s.NEW_TABLEDELTA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
routeTestDatabase.getDatabaseName()));
validateResult(
@@ -880,7 +880,7 @@ public void testExtremeMergeTableRoute() throws Exception {
i ->
String.format(
prefix
- + "CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ + "CreateTableEvent{tableId=%s.TABLE%d, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
databaseName,
i))
.toArray(String[]::new));
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
index 20e902d624d..98e4deba9a5 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolveE2eITCase.java
@@ -577,7 +577,7 @@ private void testGenericSchemaEvolution(
private void validateSnapshotData(String dbName, String tableName) throws Exception {
List expected =
Stream.of(
- "CreateTableEvent{tableId=%s.%s, schema=columns={`id` INT NOT NULL,`name` VARCHAR(17),`age` INT}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.%s, schema=columns={`id` INT NOT NULL,`name` VARCHAR(17),`age` INT}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1008, Alice, 21], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1009, Bob, 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1010, Carol, 19], op=INSERT, meta=()}",
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
index 2ff72be7cea..7523387cf70 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/SchemaEvolvingTransformE2eITCase.java
@@ -389,7 +389,7 @@ private void testGenericSchemaEvolution(
private void validateSnapshotData(String dbName, String tableName) throws Exception {
List expected =
Stream.of(
- "CreateTableEvent{tableId=%s.%s, schema=columns={`uid` STRING,`id` INT NOT NULL,`name` VARCHAR(17),`age` INT,`id_square` INT,`tag` STRING}, primaryKeys=id, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.%s, schema=columns={`uid` STRING,`id` INT NOT NULL,`name` VARCHAR(17),`age` INT,`id_square` INT,`tag` STRING}, primaryKeys=id, options=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1009 -> Bob, 1009, Bob, 20, -1018081, age >= 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1008 -> Alice, 1008, Alice, 21, -1016064, age >= 20], op=INSERT, meta=()}",
"DataChangeEvent{tableId=%s.%s, before=[], after=[1011 -> Derrida, 1011, Derrida, 18, 1022121, age < 20], op=INSERT, meta=()}",
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
index 37c878b5870..8b292ff850a 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/TransformE2eITCase.java
@@ -138,7 +138,7 @@ public void testHeteroSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -219,13 +219,13 @@ public void testMultipleTransformRule() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CATEGORY` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -309,7 +309,7 @@ public void testAssortedSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.terminus, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`NAME` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -386,13 +386,13 @@ public void testWildcardSchemaTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` STRING,`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`NAME` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -469,13 +469,13 @@ public void testWildcardWithMetadataColumnTransform() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`identifier_name` STRING,`type` STRING NOT NULL,`opts` BIGINT NOT NULL}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -553,13 +553,13 @@ public void testMultipleHittingTable() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`CODENAMESBETA` VARCHAR(17),`AGEBETA` INT,`NAMEBETA` VARCHAR(128),`UID` INT,`NEWVERSION` VARCHAR(17)}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -639,7 +639,7 @@ public void testMultipleTransformWithDiffRefColumn() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`ROLENAME` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`ROLENAME` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -714,12 +714,12 @@ public void testTransformWithCast() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` DOUBLE,`IDENTIFIER` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -833,7 +833,7 @@ public void testTransformWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`UID` STRING,`PRICE` INT}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`UID` STRING,`PRICE` INT}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -932,7 +932,7 @@ public void testTransformWildcardPrefixWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128),`UID` STRING}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
@@ -1039,7 +1039,7 @@ public void testTransformWildcardSuffixWithSchemaEvolution() throws Exception {
waitUntilSpecificEvent(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`UID` STRING,`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128)}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`UID` STRING,`ID` INT NOT NULL,`VERSION` VARCHAR(17),`PRICEALPHA` INT,`AGEALPHA` INT,`NAMEALPHA` VARCHAR(128)}, primaryKeys=ID, options=()}",
transformTestDatabase.getDatabaseName()),
60000L);
diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
index 8917964a5be..9f5b98298c1 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/UdfE2eITCase.java
@@ -179,7 +179,7 @@ private void testUserDefinedFunctions(String language) throws Exception {
List expectedEvents =
Arrays.asList(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1009, 8.1, 1011, <8.1>], op=INSERT, meta=()}",
@@ -191,7 +191,7 @@ private void testUserDefinedFunctions(String language) throws Exception {
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1011, 11, 1013, <11>], op=INSERT, meta=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`ANS` STRING,`TYP` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`ANS` STRING,`TYP` STRING}, primaryKeys=ID, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEBETA, before=[], after=[2011, 11, Forty-two, Integer: 2011], op=INSERT, meta=()}",
@@ -311,7 +311,7 @@ private void testFlinkCompatibleScalarFunctions(String language) throws Exceptio
List expectedEvents =
Arrays.asList(
String.format(
- "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEALPHA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`INC_ID` STRING,`FMT_VER` STRING}, primaryKeys=ID, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1009, 8.1, 1011, <8.1>], op=INSERT, meta=()}",
@@ -323,7 +323,7 @@ private void testFlinkCompatibleScalarFunctions(String language) throws Exceptio
"DataChangeEvent{tableId=%s.TABLEALPHA, before=[], after=[1011, 11, 1013, <11>], op=INSERT, meta=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
- "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`TYP` STRING}, primaryKeys=ID, comment=null, options=()}",
+ "CreateTableEvent{tableId=%s.TABLEBETA, schema=columns={`ID` INT NOT NULL,`VERSION` VARCHAR(17),`TYP` STRING}, primaryKeys=ID, options=()}",
transformRenameDatabase.getDatabaseName()),
String.format(
"DataChangeEvent{tableId=%s.TABLEBETA, before=[], after=[2011, 11, Integer: 2011], op=INSERT, meta=()}",
From e01941ae2e1fc728da983d6375ed2239a68a603c Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Mon, 13 Jan 2025 14:03:17 +0800
Subject: [PATCH 08/11] Use flink-cdc StringUtils for the comment blank check in Schema#toString
---
.../main/java/org/apache/flink/cdc/common/schema/Schema.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
index 2252ed621a1..6586effd9ce 100644
--- a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
+++ b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
@@ -25,7 +25,7 @@
import org.apache.flink.cdc.common.types.RowType;
import org.apache.flink.cdc.common.utils.Preconditions;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.cdc.common.utils.StringUtils;
import javax.annotation.Nullable;
@@ -241,7 +241,7 @@ public String toString() {
if (!partitionKeys.isEmpty()) {
sb.append(", partitionKeys=").append(String.join(";", partitionKeys));
}
- if (StringUtils.isNotBlank(comment)) {
+ if (StringUtils.isNullOrWhitespaceOnly(comment)) {
sb.append(", comment=").append(comment);
}
sb.append(", options=").append(describeOptions());
From 5d27bf58576a0480bc3fbc542a0cfef8580d3531 Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Mon, 13 Jan 2025 14:40:11 +0800
Subject: [PATCH 09/11] Fix inverted blank-comment condition in Schema#toString
---
.../main/java/org/apache/flink/cdc/common/schema/Schema.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
index 6586effd9ce..f078a58f7ed 100644
--- a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
+++ b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
@@ -241,7 +241,7 @@ public String toString() {
if (!partitionKeys.isEmpty()) {
sb.append(", partitionKeys=").append(String.join(";", partitionKeys));
}
- if (StringUtils.isNullOrWhitespaceOnly(comment)) {
+ if (!StringUtils.isNullOrWhitespaceOnly(comment)) {
sb.append(", comment=").append(comment);
}
sb.append(", options=").append(describeOptions());
From 1729423948885bfb7644a68c2812bd10e9eab56b Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Mon, 13 Jan 2025 16:39:27 +0800
Subject: [PATCH 10/11] format code
---
.../main/java/org/apache/flink/cdc/common/schema/Schema.java | 1 -
.../cdc/connectors/mysql/source/MySqlDataSourceOptions.java | 5 ++---
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
index f078a58f7ed..175346bc071 100644
--- a/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
+++ b/flink-cdc-common/src/main/java/org/apache/flink/cdc/common/schema/Schema.java
@@ -24,7 +24,6 @@
import org.apache.flink.cdc.common.types.DataTypes;
import org.apache.flink.cdc.common.types.RowType;
import org.apache.flink.cdc.common.utils.Preconditions;
-
import org.apache.flink.cdc.common.utils.StringUtils;
import javax.annotation.Nullable;
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
index a1cae0d770a..3747212441c 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql/src/main/java/org/apache/flink/cdc/connectors/mysql/source/MySqlDataSourceOptions.java
@@ -281,7 +281,7 @@ public class MySqlDataSourceOptions {
.withDescription(
"List of readable metadata from SourceRecord to be passed to downstream, split by `,`. "
+ "Available readable metadata are: op_ts.");
-
+
@Experimental
public static final ConfigOption PARSE_ONLINE_SCHEMA_CHANGES =
ConfigOptions.key("scan.parse.online.schema.changes.enabled")
@@ -289,7 +289,7 @@ public class MySqlDataSourceOptions {
.defaultValue(false)
.withDescription(
"Whether to parse schema change events generated by gh-ost/pt-osc utilities. Defaults to false.");
-
+
@Experimental
public static final ConfigOption INCLUDE_COMMENTS_ENABLED =
ConfigOptions.key("include-comments.enabled")
@@ -298,5 +298,4 @@ public class MySqlDataSourceOptions {
.withDescription(
"Whether enable include table and column comments, by default is false, if set to true, table and column comments will be sent. "
+ "Note: Enable this option will bring the implications on memory usage.");
-
}
From cbfcbec223323515c4a267bcde8c9388b6badd2b Mon Sep 17 00:00:00 2001
From: "north.lin" <37775475+qg-lin@users.noreply.github.com>
Date: Wed, 15 Jan 2025 12:44:56 +0800
Subject: [PATCH 11/11] update DorisMetadataApplier
---
.../flink/cdc/connectors/doris/sink/DorisMetadataApplier.java | 2 +-
.../cdc/connectors/doris/sink/DorisSchemaChangeManager.java | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
index 84b4e13855a..1d6476152b4 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisMetadataApplier.java
@@ -78,7 +78,7 @@ public class DorisMetadataApplier implements MetadataApplier {
public DorisMetadataApplier(DorisOptions dorisOptions, Configuration config) {
this.dorisOptions = dorisOptions;
this.schemaChangeManager =
- new SchemaChangeManager(dorisOptions, config.get(CHARSET_ENCODING));
+ new DorisSchemaChangeManager(dorisOptions, config.get(CHARSET_ENCODING));
this.config = config;
this.enabledSchemaEvolutionTypes = getSupportedSchemaEvolutionTypes();
}
diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisSchemaChangeManager.java b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisSchemaChangeManager.java
index a4636f0457c..5358faf7897 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisSchemaChangeManager.java
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris/src/main/java/org/apache/flink/cdc/connectors/doris/sink/DorisSchemaChangeManager.java
@@ -27,8 +27,8 @@
/** An enriched version of Doris' {@link SchemaChangeManager}. */
public class DorisSchemaChangeManager extends SchemaChangeManager {
- public DorisSchemaChangeManager(DorisOptions dorisOptions) {
- super(dorisOptions);
+ public DorisSchemaChangeManager(DorisOptions dorisOptions, String charsetEncoding) {
+ super(dorisOptions, charsetEncoding);
}
public boolean truncateTable(String databaseName, String tableName)