diff --git a/flink/v1.15/flink/src/test/java/com/netease/arctic/flink/TestFlinkSchemaUtil.java b/flink/v1.15/flink/src/test/java/com/netease/arctic/flink/TestFlinkSchemaUtil.java
deleted file mode 100644
index 208d382d97..0000000000
--- a/flink/v1.15/flink/src/test/java/com/netease/arctic/flink/TestFlinkSchemaUtil.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.netease.arctic.flink;
-
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.TableSchema;
-import org.apache.iceberg.Schema;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Map;
-
-public class TestFlinkSchemaUtil {
-  @Test
-  public void testFlinkSchemaToIcebergSchema() {
-    // flinkSchema with physical columns, a computed column, and a watermark
-    TableSchema flinkSchema =
-        TableSchema.builder()
-            .field("id", DataTypes.INT().notNull())
-            .field("name", DataTypes.STRING())
-            .field("ts", DataTypes.TIMESTAMP(6))
-            .field("compute_id", DataTypes.INT(), "`id` + 5")
-            .field("proc", DataTypes.TIMESTAMP_LTZ(), "PROCTIME()")
-            // org.apache.iceberg.flink.TypeToFlinkType will convert Timestamp to Timestamp(6), so
-            // we cast the datatype manually
-            .field("ts3", DataTypes.TIMESTAMP(3), "cast(ts as timestamp(3))")
-            .watermark("ts3", "`ts3` - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
-            .build();
-
-    // get the physical schema from the table schema
-    Schema icebergSchema =
-        org.apache.iceberg.flink.FlinkSchemaUtil.convert(
-            FlinkSchemaUtil.getPhysicalSchema(flinkSchema));
-
-    Map<String, String> arcticProperties = FlinkSchemaUtil.addSchemaProperties(flinkSchema);
-
-    // convert the Iceberg Schema back to a Flink TableSchema
-    TableSchema fromIcebergSchema =
-        FlinkSchemaUtil.toSchema(icebergSchema, new ArrayList<>(), arcticProperties);
-
-    Assert.assertEquals(flinkSchema, fromIcebergSchema);
-  }
-}
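For context on what the removed test covered: an Iceberg Schema models only physical columns, so Arctic's FlinkSchemaUtil serializes the computed-column expressions and watermark spec into table properties (addSchemaProperties) and re-applies them when converting back (toSchema). Below is a minimal sketch of that round trip, assuming the helper signatures seen in the deleted test (getPhysicalSchema, addSchemaProperties, toSchema) and a hypothetical class name; it is an illustration, not part of this change.

package com.netease.arctic.flink;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.iceberg.Schema;

import java.util.ArrayList;
import java.util.Map;

public class SchemaRoundTripSketch {
  public static void main(String[] args) {
    // One physical column plus one computed column; Iceberg sees only the former.
    TableSchema flinkSchema =
        TableSchema.builder()
            .field("id", DataTypes.INT().notNull())
            .field("compute_id", DataTypes.INT(), "`id` + 5")
            .build();

    // Drop non-physical columns before converting to an Iceberg Schema.
    Schema icebergSchema =
        org.apache.iceberg.flink.FlinkSchemaUtil.convert(
            FlinkSchemaUtil.getPhysicalSchema(flinkSchema));

    // Computed-column expressions travel as table properties ...
    Map<String, String> props = FlinkSchemaUtil.addSchemaProperties(flinkSchema);

    // ... and are re-applied on the way back (no primary keys in this sketch).
    TableSchema restored =
        FlinkSchemaUtil.toSchema(icebergSchema, new ArrayList<>(), props);

    System.out.println(flinkSchema.equals(restored)); // expected: true
  }
}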