From 3f2447f0964b8c7eed6cd1e6b464b6654ec4dcea Mon Sep 17 00:00:00 2001 From: Junhao Liu Date: Mon, 4 Dec 2023 15:06:43 -0600 Subject: [PATCH 1/3] style: replace Arrays.asList when there's a single object --- .../org/apache/iceberg/util/TestExceptionUtil.java | 7 ++++--- .../aws/s3/signer/TestS3SignRequestParser.java | 13 +++++++------ .../aws/s3/signer/TestS3SignResponseParser.java | 5 +++-- .../org/apache/iceberg/BaseRewriteManifests.java | 4 ++-- .../test/java/org/apache/iceberg/ScanTestBase.java | 12 ++++++------ .../org/apache/iceberg/TestContentFileParser.java | 3 ++- .../enumerator/TestContinuousIcebergEnumerator.java | 9 ++++----- .../source/reader/TestIcebergSourceReader.java | 2 +- .../source/reader/TestIcebergSourceReader.java | 2 +- .../source/reader/TestIcebergSourceReader.java | 2 +- .../org/apache/iceberg/hive/TestHiveSchemaUtil.java | 5 +++-- 11 files changed, 34 insertions(+), 30 deletions(-) diff --git a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java index f2106cc62c5f..379e68ff27f2 100644 --- a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java +++ b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; @@ -51,7 +52,7 @@ public void testRunSafely() { CustomCheckedException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Arrays.asList(e.getSuppressed())) + .extracting(e -> Collections.singletonList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); @@ -80,7 +81,7 @@ public void testRunSafelyTwoExceptions() { IOException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Arrays.asList(e.getSuppressed())) + .extracting(e -> 
Collections.singletonList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); @@ -110,7 +111,7 @@ public void testRunSafelyThreeExceptions() { ClassNotFoundException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Arrays.asList(e.getSuppressed())) + .extracting(e -> Collections.singletonList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java index a6928183f705..2c5f74ad8064 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignRequestParser.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.JsonNode; import java.net.URI; import java.util.Arrays; +import java.util.Collections; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; @@ -122,9 +123,9 @@ public void roundTripSerde() { "amz-sdk-request", Arrays.asList("attempt=1", "max=4"), "Content-Length", - Arrays.asList("191"), + Collections.singletonList("191"), "Content-Type", - Arrays.asList("application/json"), + Collections.singletonList("application/json"), "User-Agent", Arrays.asList("aws-sdk-java/2.20.18", "Linux/5.4.0-126"))) .build(); @@ -158,9 +159,9 @@ public void roundTripSerdeWithProperties() { "amz-sdk-request", Arrays.asList("attempt=1", "max=4"), "Content-Length", - Arrays.asList("191"), + Collections.singletonList("191"), "Content-Type", - Arrays.asList("application/json"), + Collections.singletonList("application/json"), "User-Agent", Arrays.asList("aws-sdk-java/2.20.18", "Linux/5.4.0-126"))) .properties(ImmutableMap.of("k1", "v1")) @@ -198,9 +199,9 @@ public void roundTripWithBody() { "amz-sdk-request", 
Arrays.asList("attempt=1", "max=4"), "Content-Length", - Arrays.asList("191"), + Collections.singletonList("191"), "Content-Type", - Arrays.asList("application/json"), + Collections.singletonList("application/json"), "User-Agent", Arrays.asList("aws-sdk-java/2.20.18", "Linux/5.4.0-126"))) .properties(ImmutableMap.of("k1", "v1")) diff --git a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java index d7337b1b1777..d2cf132ba598 100644 --- a/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java +++ b/aws/src/test/java/org/apache/iceberg/aws/s3/signer/TestS3SignResponseParser.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.JsonNode; import java.net.URI; import java.util.Arrays; +import java.util.Collections; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; @@ -70,9 +71,9 @@ public void roundTripSerde() { "amz-sdk-request", Arrays.asList("attempt=1", "max=4"), "Content-Length", - Arrays.asList("191"), + Collections.singletonList("191"), "Content-Type", - Arrays.asList("application/json"), + Collections.singletonList("application/json"), "User-Agent", Arrays.asList("aws-sdk-java/2.20.18", "Linux/5.4.0-126"))) .build(); diff --git a/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java b/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java index 87768e34894a..c70dda2bd6d0 100644 --- a/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java +++ b/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java @@ -22,8 +22,8 @@ import static org.apache.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES_DEFAULT; import java.io.IOException; -import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ 
-247,7 +247,7 @@ private void performRewrite(List currentManifests) { rewrittenManifests.add(manifest); try (ManifestReader reader = ManifestFiles.read(manifest, ops.io(), ops.current().specsById()) - .select(Arrays.asList("*"))) { + .select(Collections.singletonList("*"))) { reader .liveEntries() .forEach( diff --git a/core/src/test/java/org/apache/iceberg/ScanTestBase.java b/core/src/test/java/org/apache/iceberg/ScanTestBase.java index 5e7793939925..48a8ccbaa941 100644 --- a/core/src/test/java/org/apache/iceberg/ScanTestBase.java +++ b/core/src/test/java/org/apache/iceberg/ScanTestBase.java @@ -23,7 +23,7 @@ import java.io.File; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import org.apache.iceberg.expressions.Expression; @@ -56,7 +56,7 @@ public ScanTestBase(int formatVersion) { @Test public void testTableScanHonorsSelect() { - ScanT scan = newScan().select(Arrays.asList("id")); + ScanT scan = newScan().select(Collections.singletonList("id")); Schema expectedSchema = new Schema(required(1, "id", Types.IntegerType.get())); @@ -69,20 +69,20 @@ public void testTableScanHonorsSelect() { @Test public void testTableBothProjectAndSelect() { Assertions.assertThatThrownBy( - () -> newScan().select(Arrays.asList("id")).project(SCHEMA.select("data"))) + () -> newScan().select(Collections.singletonList("id")).project(SCHEMA.select("data"))) .isInstanceOf(IllegalStateException.class) .hasMessage("Cannot set projection schema when columns are selected"); Assertions.assertThatThrownBy( - () -> newScan().project(SCHEMA.select("data")).select(Arrays.asList("id"))) + () -> newScan().project(SCHEMA.select("data")).select(Collections.singletonList("id"))) .isInstanceOf(IllegalStateException.class) .hasMessage("Cannot select columns when projection schema is set"); } @Test public void testTableScanHonorsSelectWithoutCaseSensitivity() { - ScanT scan1 = 
newScan().caseSensitive(false).select(Arrays.asList("ID")); + ScanT scan1 = newScan().caseSensitive(false).select(Collections.singletonList("ID")); // order of refinements shouldn't matter - ScanT scan2 = newScan().select(Arrays.asList("ID")).caseSensitive(false); + ScanT scan2 = newScan().select(Collections.singletonList("ID")).caseSensitive(false); Schema expectedSchema = new Schema(required(1, "id", Types.IntegerType.get())); diff --git a/core/src/test/java/org/apache/iceberg/TestContentFileParser.java b/core/src/test/java/org/apache/iceberg/TestContentFileParser.java index f3b5e7b60c31..4fda388159ec 100644 --- a/core/src/test/java/org/apache/iceberg/TestContentFileParser.java +++ b/core/src/test/java/org/apache/iceberg/TestContentFileParser.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.JsonNode; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.stream.Stream; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.types.Comparators; @@ -270,7 +271,7 @@ private static DeleteFile deleteFileWithAllOptional(PartitionSpec spec) { metrics, new int[] {3}, 1, - Arrays.asList(128L), + Collections.singletonList(128L), ByteBuffer.wrap(new byte[16])); } diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java index 349eb11cf549..0dfbab4c62bb 100644 --- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java +++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java @@ -18,7 +18,6 @@ */ package org.apache.iceberg.flink.source.enumerator; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -173,7 +172,7 @@ public 
void testThrottlingDiscovery() throws Exception { enumerator.handleSourceEvent(2, new SplitRequestEvent()); // add splits[0] to the planner for next discovery - splitPlanner.addSplits(Arrays.asList(splits.get(0))); + splitPlanner.addSplits(Collections.singletonList(splits.get(0))); enumeratorContext.triggerAllActions(); // because discovered split was assigned to reader, pending splits should be empty @@ -185,7 +184,7 @@ public void testThrottlingDiscovery() throws Exception { // add the remaining 9 splits (one for every snapshot) // run discovery cycles while reader-2 still processing the splits[0] for (int i = 1; i < 10; ++i) { - splitPlanner.addSplits(Arrays.asList(splits.get(i))); + splitPlanner.addSplits(Collections.singletonList(splits.get(i))); enumeratorContext.triggerAllActions(); } @@ -196,7 +195,7 @@ public void testThrottlingDiscovery() throws Exception { splits.subList(0, 1), enumeratorContext.getSplitAssignments().get(2).getAssignedSplits()); // now reader-2 finished splits[0] - enumerator.handleSourceEvent(2, new SplitRequestEvent(Arrays.asList(splits.get(0).splitId()))); + enumerator.handleSourceEvent(2, new SplitRequestEvent(Collections.singletonList(splits.get(0).splitId()))); enumeratorContext.triggerAllActions(); // still have 3 pending splits. After assigned splits[1] to reader-2, one more split was // discovered and added. @@ -217,7 +216,7 @@ public void testThrottlingDiscovery() throws Exception { splits.subList(0, 2), enumeratorContext.getSplitAssignments().get(2).getAssignedSplits()); // now reader-2 finished splits[1] - enumerator.handleSourceEvent(2, new SplitRequestEvent(Arrays.asList(splits.get(1).splitId()))); + enumerator.handleSourceEvent(2, new SplitRequestEvent(Collections.singletonList(splits.get(1).splitId()))); enumeratorContext.triggerAllActions(); // still have 3 pending splits. After assigned new splits[2] to reader-2, one more split was // discovered and added. 
diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java index 88234c61123f..f19d57083b89 100644 --- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java +++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java @@ -132,7 +132,7 @@ private void testOneSplitFetcher( ReaderUtil.createCombinedScanTask( recordBatchList, TEMPORARY_FOLDER, FileFormat.PARQUET, appenderFactory); IcebergSourceSplit split = IcebergSourceSplit.fromCombinedScanTask(task); - reader.addSplits(Arrays.asList(split)); + reader.addSplits(Collections.singletonList(split)); while (readerOutput.getEmittedRecords().size() < expectedCount) { reader.pollNext(readerOutput); diff --git a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java index 88234c61123f..f19d57083b89 100644 --- a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java +++ b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java @@ -132,7 +132,7 @@ private void testOneSplitFetcher( ReaderUtil.createCombinedScanTask( recordBatchList, TEMPORARY_FOLDER, FileFormat.PARQUET, appenderFactory); IcebergSourceSplit split = IcebergSourceSplit.fromCombinedScanTask(task); - reader.addSplits(Arrays.asList(split)); + reader.addSplits(Collections.singletonList(split)); while (readerOutput.getEmittedRecords().size() < expectedCount) { reader.pollNext(readerOutput); diff --git a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java 
b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java index 88234c61123f..f19d57083b89 100644 --- a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java +++ b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/source/reader/TestIcebergSourceReader.java @@ -132,7 +132,7 @@ private void testOneSplitFetcher( ReaderUtil.createCombinedScanTask( recordBatchList, TEMPORARY_FOLDER, FileFormat.PARQUET, appenderFactory); IcebergSourceSplit split = IcebergSourceSplit.fromCombinedScanTask(task); - reader.addSplits(Arrays.asList(split)); + reader.addSplits(Collections.singletonList(split)); while (readerOutput.getEmittedRecords().size() < expectedCount) { reader.pollNext(readerOutput); diff --git a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java index 84d11d03a741..e5410bc94e70 100644 --- a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java +++ b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java @@ -23,6 +23,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -147,7 +148,7 @@ public void testSchemaConvertToIcebergSchemaForEveryPrimitiveType() { public void testNotSupportedTypes() { for (FieldSchema notSupportedField : getNotSupportedFieldSchemas()) { assertThatThrownBy( - () -> HiveSchemaUtil.convert(Lists.newArrayList(Arrays.asList(notSupportedField)))) + () -> HiveSchemaUtil.convert(Lists.newArrayList(Collections.singletonList(notSupportedField)))) .isInstanceOf(IllegalArgumentException.class) .hasMessageStartingWith("Unsupported Hive type"); } @@ -197,7 +198,7 @@ public void testConversionWithoutLastComment() { Arrays.asList( 
TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.BIGINT_TYPE_NAME), TypeInfoUtils.getTypeInfoFromTypeString(serdeConstants.STRING_TYPE_NAME)), - Arrays.asList("customer comment")); + Collections.singletonList("customer comment")); assertThat(schema.asStruct()).isEqualTo(expected.asStruct()); } From 5fc225a6c0d9810e9250dffbafdbbbc2b0db844d Mon Sep 17 00:00:00 2001 From: Junhao Liu Date: Mon, 4 Dec 2023 15:07:26 -0600 Subject: [PATCH 2/3] style: format --- .../test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java index e5410bc94e70..1592a3461b40 100644 --- a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java +++ b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveSchemaUtil.java @@ -148,7 +148,9 @@ public void testSchemaConvertToIcebergSchemaForEveryPrimitiveType() { public void testNotSupportedTypes() { for (FieldSchema notSupportedField : getNotSupportedFieldSchemas()) { assertThatThrownBy( - () -> HiveSchemaUtil.convert(Lists.newArrayList(Collections.singletonList(notSupportedField)))) + () -> + HiveSchemaUtil.convert( + Lists.newArrayList(Collections.singletonList(notSupportedField)))) .isInstanceOf(IllegalArgumentException.class) .hasMessageStartingWith("Unsupported Hive type"); } From f997a8b7bfea072b469a4e0e2bb175d639d1ad82 Mon Sep 17 00:00:00 2001 From: Junhao Liu Date: Mon, 4 Dec 2023 17:28:05 -0600 Subject: [PATCH 3/3] style: format code --- .../java/org/apache/iceberg/util/TestExceptionUtil.java | 7 +++---- .../source/enumerator/TestContinuousIcebergEnumerator.java | 6 ++++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java index 
379e68ff27f2..f2106cc62c5f 100644 --- a/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java +++ b/api/src/test/java/org/apache/iceberg/util/TestExceptionUtil.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; @@ -52,7 +51,7 @@ public void testRunSafely() { CustomCheckedException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Collections.singletonList(e.getSuppressed())) + .extracting(e -> Arrays.asList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); @@ -81,7 +80,7 @@ public void testRunSafelyTwoExceptions() { IOException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Collections.singletonList(e.getSuppressed())) + .extracting(e -> Arrays.asList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); @@ -111,7 +110,7 @@ public void testRunSafelyThreeExceptions() { ClassNotFoundException.class)) .isInstanceOf(CustomCheckedException.class) .isEqualTo(exc) - .extracting(e -> Collections.singletonList(e.getSuppressed())) + .extracting(e -> Arrays.asList(e.getSuppressed())) .asList() .hasSize(2) .containsExactly(suppressedOne, suppressedTwo); diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java index 0dfbab4c62bb..5b0ed39745c5 100644 --- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java +++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/source/enumerator/TestContinuousIcebergEnumerator.java @@ -195,7 +195,8 @@ public void testThrottlingDiscovery() throws Exception { splits.subList(0, 1), 
enumeratorContext.getSplitAssignments().get(2).getAssignedSplits()); // now reader-2 finished splits[0] - enumerator.handleSourceEvent(2, new SplitRequestEvent(Collections.singletonList(splits.get(0).splitId()))); + enumerator.handleSourceEvent( + 2, new SplitRequestEvent(Collections.singletonList(splits.get(0).splitId()))); enumeratorContext.triggerAllActions(); // still have 3 pending splits. After assigned splits[1] to reader-2, one more split was // discovered and added. @@ -216,7 +217,8 @@ public void testThrottlingDiscovery() throws Exception { splits.subList(0, 2), enumeratorContext.getSplitAssignments().get(2).getAssignedSplits()); // now reader-2 finished splits[1] - enumerator.handleSourceEvent(2, new SplitRequestEvent(Collections.singletonList(splits.get(1).splitId()))); + enumerator.handleSourceEvent( + 2, new SplitRequestEvent(Collections.singletonList(splits.get(1).splitId()))); enumeratorContext.triggerAllActions(); // still have 3 pending splits. After assigned new splits[2] to reader-2, one more split was // discovered and added.