
Switch to junit5 for mr except ParameterizedTest
lschetanrao committed Dec 7, 2023
1 parent d247b20 commit db24008
Showing 17 changed files with 368 additions and 374 deletions.
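
The file diffs shown below all follow the same mechanical migration: org.junit annotations are replaced by their org.junit.jupiter.api equivalents, @Rule TemporaryFolder becomes an injected @TempDir Path, org.junit.Assert calls become AssertJ Assertions, and the Gradle build switches the module to useJUnitPlatform() so the Jupiter tests are actually discovered. A minimal before/after sketch of that pattern follows; the class name and values are illustrative, not code from the commit.

// JUnit 4 (before)
//
//   import org.junit.Assert;
//   import org.junit.Before;
//   import org.junit.Rule;
//   import org.junit.Test;
//   import org.junit.rules.TemporaryFolder;
//
//   public class ExampleTest {
//     @Rule public TemporaryFolder temp = new TemporaryFolder();
//
//     @Before
//     public void before() { /* setup */ }
//
//     @Test
//     public void test() {
//       Assert.assertEquals("expected", "actual".replace("actual", "expected"));
//     }
//   }

// JUnit 5 + AssertJ (after)
import java.nio.file.Path;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class ExampleTest {
  @TempDir public Path temp;    // replaces @Rule TemporaryFolder

  @BeforeEach                   // replaces @Before
  public void before() { /* setup */ }

  @Test                         // org.junit.jupiter.api.Test replaces org.junit.Test
  public void test() {
    Assertions.assertThat("actual".replace("actual", "expected")).isEqualTo("expected");
  }
}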
3 changes: 3 additions & 0 deletions mr/build.gradle
@@ -23,6 +23,9 @@ project(':iceberg-mr') {
exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
}
}
test {
useJUnitPlatform()
}

dependencies {
implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow')
62 changes: 31 additions & 31 deletions mr/src/test/java/org/apache/iceberg/mr/TestCatalogs.java
@@ -22,6 +22,7 @@
import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
@@ -40,11 +41,9 @@
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.types.Types;
import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestCatalogs {

@@ -54,9 +53,10 @@ public class TestCatalogs {

private Configuration conf;

@Rule public TemporaryFolder temp = new TemporaryFolder();
@TempDir
public Path temp;

@Before
@BeforeEach
public void before() {
conf = new Configuration();
}
@@ -70,17 +70,17 @@ public void testLoadTableFromLocation() throws IOException {
.hasMessage("Table location not set");

HadoopTables tables = new HadoopTables();
Table hadoopTable = tables.create(SCHEMA, temp.newFolder("hadoop_tables").toString());
Table hadoopTable = tables.create(SCHEMA, temp.resolve("hadoop_tables").toString());

conf.set(InputFormatConfig.TABLE_LOCATION, hadoopTable.location());

Assert.assertEquals(hadoopTable.location(), Catalogs.loadTable(conf).location());
Assertions.assertThat(Catalogs.loadTable(conf).location()).isEqualTo(hadoopTable.location());
}

@Test
public void testLoadTableFromCatalog() throws IOException {
String defaultCatalogName = "default";
String warehouseLocation = temp.newFolder("hadoop", "warehouse").toString();
String warehouseLocation = temp.resolve("hadoop").resolve("warehouse").toString();
setCustomCatalogProperties(defaultCatalogName, warehouseLocation);

Assertions.assertThatThrownBy(() -> Catalogs.loadTable(conf))
@@ -92,13 +92,13 @@ public void testLoadTableFromCatalog() throws IOException {

conf.set(InputFormatConfig.TABLE_IDENTIFIER, "table");

Assert.assertEquals(hadoopCatalogTable.location(), Catalogs.loadTable(conf).location());
Assertions.assertThat(Catalogs.loadTable(conf).location()).isEqualTo(hadoopCatalogTable.location());
}

@Test
public void testCreateDropTableToLocation() throws IOException {
Properties missingSchema = new Properties();
missingSchema.put("location", temp.newFolder("hadoop_tables").toString());
missingSchema.put("location", temp.resolve("hadoop_tables").toString());

Assertions.assertThatThrownBy(() -> Catalogs.createTable(conf, missingSchema))
.isInstanceOf(NullPointerException.class)
@@ -113,7 +113,7 @@ public void testCreateDropTableToLocation() throws IOException {
.hasMessage("Table location not set");

Properties properties = new Properties();
properties.put("location", temp.getRoot() + "/hadoop_tables");
properties.put("location", temp.toFile() + "/hadoop_tables");
properties.put(InputFormatConfig.TABLE_SCHEMA, SchemaParser.toJson(SCHEMA));
properties.put(InputFormatConfig.PARTITION_SPEC, PartitionSpecParser.toJson(SPEC));
properties.put("dummy", "test");
@@ -123,17 +123,17 @@ public void testCreateDropTableToLocation() throws IOException {
HadoopTables tables = new HadoopTables();
Table table = tables.load(properties.getProperty("location"));

Assert.assertEquals(properties.getProperty("location"), table.location());
Assert.assertEquals(SchemaParser.toJson(SCHEMA), SchemaParser.toJson(table.schema()));
Assert.assertEquals(PartitionSpecParser.toJson(SPEC), PartitionSpecParser.toJson(table.spec()));
Assertions.assertThat(table.location()).isEqualTo(properties.getProperty("location"));
Assertions.assertThat(SchemaParser.toJson(table.schema())).isEqualTo(SchemaParser.toJson(SCHEMA));
Assertions.assertThat(PartitionSpecParser.toJson(table.spec())).isEqualTo(PartitionSpecParser.toJson(SPEC));
assertThat(table.properties()).containsEntry("dummy", "test");

Assertions.assertThatThrownBy(() -> Catalogs.dropTable(conf, new Properties()))
.isInstanceOf(NullPointerException.class)
.hasMessage("Table location not set");

Properties dropProperties = new Properties();
dropProperties.put("location", temp.getRoot() + "/hadoop_tables");
dropProperties.put("location", temp.toFile() + "/hadoop_tables");
Catalogs.dropTable(conf, dropProperties);

Assertions.assertThatThrownBy(() -> Catalogs.loadTable(conf, dropProperties))
@@ -145,7 +145,7 @@ public void testCreateDropTableToLocation() throws IOException {
public void testCreateDropTableToCatalog() throws IOException {
TableIdentifier identifier = TableIdentifier.of("test", "table");
String defaultCatalogName = "default";
String warehouseLocation = temp.newFolder("hadoop", "warehouse").toString();
String warehouseLocation = temp.resolve("hadoop").resolve("warehouse").toString();

setCustomCatalogProperties(defaultCatalogName, warehouseLocation);

@@ -176,8 +176,8 @@ public void testCreateDropTableToCatalog() throws IOException {
HadoopCatalog catalog = new CustomHadoopCatalog(conf, warehouseLocation);
Table table = catalog.loadTable(identifier);

Assert.assertEquals(SchemaParser.toJson(SCHEMA), SchemaParser.toJson(table.schema()));
Assert.assertEquals(PartitionSpecParser.toJson(SPEC), PartitionSpecParser.toJson(table.spec()));
Assertions.assertThat(SchemaParser.toJson(table.schema())).isEqualTo(SchemaParser.toJson(SCHEMA));
Assertions.assertThat(PartitionSpecParser.toJson(table.spec())).isEqualTo(PartitionSpecParser.toJson(SPEC));
assertThat(table.properties()).containsEntry("dummy", "test");

Assertions.assertThatThrownBy(() -> Catalogs.dropTable(conf, new Properties()))
@@ -198,11 +198,11 @@
public void testLoadCatalogDefault() {
String catalogName = "barCatalog";
Optional<Catalog> defaultCatalog = Catalogs.loadCatalog(conf, catalogName);
Assert.assertTrue(defaultCatalog.isPresent());
Assertions.assertThat(defaultCatalog.isPresent()).isTrue();
Assertions.assertThat(defaultCatalog.get()).isInstanceOf(HiveCatalog.class);
Properties properties = new Properties();
properties.put(InputFormatConfig.CATALOG_NAME, catalogName);
Assert.assertTrue(Catalogs.hiveCatalog(conf, properties));
Assertions.assertThat(Catalogs.hiveCatalog(conf, properties)).isTrue();
}

@Test
@@ -212,11 +212,11 @@ public void testLoadCatalogHive() {
InputFormatConfig.catalogPropertyConfigKey(catalogName, CatalogUtil.ICEBERG_CATALOG_TYPE),
CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE);
Optional<Catalog> hiveCatalog = Catalogs.loadCatalog(conf, catalogName);
Assert.assertTrue(hiveCatalog.isPresent());
Assertions.assertThat(hiveCatalog.isPresent()).isTrue();
Assertions.assertThat(hiveCatalog.get()).isInstanceOf(HiveCatalog.class);
Properties properties = new Properties();
properties.put(InputFormatConfig.CATALOG_NAME, catalogName);
Assert.assertTrue(Catalogs.hiveCatalog(conf, properties));
Assertions.assertThat(Catalogs.hiveCatalog(conf, properties)).isTrue();
}

@Test
@@ -230,13 +230,13 @@ public void testLoadCatalogHadoop() {
catalogName, CatalogProperties.WAREHOUSE_LOCATION),
"/tmp/mylocation");
Optional<Catalog> hadoopCatalog = Catalogs.loadCatalog(conf, catalogName);
Assert.assertTrue(hadoopCatalog.isPresent());
Assertions.assertThat(hadoopCatalog.isPresent()).isTrue();
Assertions.assertThat(hadoopCatalog.get()).isInstanceOf(HadoopCatalog.class);
Assert.assertEquals(
"HadoopCatalog{name=barCatalog, location=/tmp/mylocation}", hadoopCatalog.get().toString());
Assertions.assertThat(hadoopCatalog.get().toString())
.isEqualTo("HadoopCatalog{name=barCatalog, location=/tmp/mylocation}");
Properties properties = new Properties();
properties.put(InputFormatConfig.CATALOG_NAME, catalogName);
Assert.assertFalse(Catalogs.hiveCatalog(conf, properties));
Assertions.assertThat(Catalogs.hiveCatalog(conf, properties)).isFalse();
}

@Test
@@ -250,16 +250,16 @@ public void testLoadCatalogCustom() {
catalogName, CatalogProperties.WAREHOUSE_LOCATION),
"/tmp/mylocation");
Optional<Catalog> customHadoopCatalog = Catalogs.loadCatalog(conf, catalogName);
Assert.assertTrue(customHadoopCatalog.isPresent());
Assertions.assertThat(customHadoopCatalog.isPresent()).isTrue();
Assertions.assertThat(customHadoopCatalog.get()).isInstanceOf(CustomHadoopCatalog.class);
Properties properties = new Properties();
properties.put(InputFormatConfig.CATALOG_NAME, catalogName);
Assert.assertFalse(Catalogs.hiveCatalog(conf, properties));
Assertions.assertThat(Catalogs.hiveCatalog(conf, properties)).isFalse();
}

@Test
public void testLoadCatalogLocation() {
Assert.assertFalse(Catalogs.loadCatalog(conf, Catalogs.ICEBERG_HADOOP_TABLE_NAME).isPresent());
Assertions.assertThat(Catalogs.loadCatalog(conf, Catalogs.ICEBERG_HADOOP_TABLE_NAME).isPresent()).isFalse();
}

@Test
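
One behavioural difference worth noting in the TestCatalogs changes above: TemporaryFolder.newFolder(...) created the directory on disk, whereas the injected @TempDir Path only has resolve(...) called on it, so the directory is created later by HadoopTables/HadoopCatalog. A minimal, self-contained sketch of that difference (class and test names are illustrative, not from the commit):

import static org.assertj.core.api.Assertions.assertThat;

import java.nio.file.Files;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

class TempDirSketchTest {

  @TempDir Path temp; // injected fresh for each test method; replaces @Rule TemporaryFolder

  @Test
  void resolveBuildsPathsWithoutCreatingThem() throws Exception {
    // TemporaryFolder.newFolder("hadoop_tables") created the directory eagerly;
    // Path.resolve("hadoop_tables") does not -- the code under test is expected to create it.
    Path tables = temp.resolve("hadoop_tables");
    assertThat(Files.exists(tables)).isFalse();

    // Create it explicitly if a test needs the directory to exist up front.
    Files.createDirectories(tables);
    assertThat(Files.isDirectory(tables)).isTrue();
  }
}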
mr/src/test/java/org/apache/iceberg/mr/hive/HiveIcebergTestUtils.java
@@ -63,7 +63,7 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.ByteBuffers;
import org.junit.Assert;
import org.assertj.core.api.Assertions;

public class HiveIcebergTestUtils {
// TODO: Can this be a constant all around the Iceberg tests?
@@ -218,13 +218,12 @@ public static void assertEquals(Record expected, Record actual) {
for (int i = 0; i < expected.size(); ++i) {
if (expected.get(i) instanceof OffsetDateTime) {
// For OffsetDateTime we just compare the actual instant
Assert.assertEquals(
((OffsetDateTime) expected.get(i)).toInstant(),
((OffsetDateTime) actual.get(i)).toInstant());
Assertions.assertThat(((OffsetDateTime) actual.get(i)).toInstant())
.isEqualTo(((OffsetDateTime) expected.get(i)).toInstant());
} else if (expected.get(i) instanceof byte[]) {
Assert.assertArrayEquals((byte[]) expected.get(i), (byte[]) actual.get(i));
Assertions.assertThat((byte[]) actual.get(i)).isEqualTo((byte[]) expected.get(i));
} else {
Assert.assertEquals(expected.get(i), actual.get(i));
Assertions.assertThat(actual.get(i)).isEqualTo(expected.get(i));
}
}
}
@@ -265,7 +264,7 @@ public static void validateData(List<Record> expected, List<Record> actual, int
sortedExpected.sort(Comparator.comparingLong(record -> (Long) record.get(sortBy)));
sortedActual.sort(Comparator.comparingLong(record -> (Long) record.get(sortBy)));

Assert.assertEquals(sortedExpected.size(), sortedActual.size());
Assertions.assertThat(sortedActual.size()).isEqualTo(sortedExpected.size());
for (int i = 0; i < sortedExpected.size(); ++i) {
assertEquals(sortedExpected.get(i), sortedActual.get(i));
}
@@ -288,9 +287,9 @@ public static void validateFiles(Table table, Configuration conf, JobID jobId, i
.filter(path -> !path.getFileName().toString().startsWith("."))
.collect(Collectors.toList());

Assert.assertEquals(dataFileNum, dataFiles.size());
Assert.assertFalse(
Assertions.assertThat(dataFiles.size()).isEqualTo(dataFileNum);
Assertions.assertThat(
new File(HiveIcebergOutputCommitter.generateJobLocation(table.location(), conf, jobId))
.exists());
.exists()).isFalse();
}
}
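
HiveIcebergTestUtils above swaps org.junit.Assert for AssertJ; AssertJ's isEqualTo compares byte[] content element-wise, so it also covers the old assertArrayEquals calls. A small standalone sketch of the mapping, with illustrative values rather than data from the tests:

import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class AssertjMappingSketch {
  public static void main(String[] args) {
    // Assert.assertEquals(expected.toInstant(), actual.toInstant())
    OffsetDateTime expected = OffsetDateTime.of(2023, 12, 7, 0, 0, 0, 0, ZoneOffset.UTC);
    OffsetDateTime actual = expected.withOffsetSameInstant(ZoneOffset.ofHours(2));
    assertThat(actual.toInstant()).isEqualTo(expected.toInstant());

    // Assert.assertArrayEquals(expectedBytes, actualBytes)
    byte[] expectedBytes = {1, 2, 3};
    byte[] actualBytes = {1, 2, 3};
    assertThat(actualBytes).isEqualTo(expectedBytes); // element-wise comparison for arrays

    // Assert.assertFalse(condition)
    assertThat(new File("/no/such/path").exists()).isFalse();
  }
}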
22 changes: 11 additions & 11 deletions mr/src/test/java/org/apache/iceberg/mr/hive/TestDeserializer.java
@@ -35,9 +35,9 @@
import org.apache.iceberg.hive.HiveVersion;
import org.apache.iceberg.mr.hive.serde.objectinspector.IcebergObjectInspector;
import org.apache.iceberg.types.Types;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.Test;

public class TestDeserializer {
private static final Schema CUSTOMER_SCHEMA =
@@ -74,7 +74,7 @@ public void testSchemaDeserialize() {

Record actual = deserializer.deserialize(new Object[] {new LongWritable(1L), new Text("Bob")});

Assert.assertEquals(expected, actual);
Assertions.assertThat(actual).isEqualTo(expected);
}

@Test
@@ -92,7 +92,7 @@ public void testStructDeserialize() {

Record actual = deserializer.deserialize(new Object[] {new LongWritable(1L), new Text("Bob")});

Assert.assertEquals(expected, actual);
Assertions.assertThat(actual).isEqualTo(expected);
}

@Test
@@ -127,7 +127,7 @@ public void testMapDeserialize() {
Object[] data = new Object[] {map};
Record actual = deserializer.deserialize(data);

Assert.assertEquals(expected, actual);
Assertions.assertThat(actual).isEqualTo(expected);
}

@Test
@@ -155,13 +155,13 @@ public void testListDeserialize() {
Object[] data = new Object[] {new Object[] {new LongWritable(1L)}};
Record actual = deserializer.deserialize(data);

Assert.assertEquals(expected, actual);
Assertions.assertThat(actual).isEqualTo(expected);
}

@Test
public void testDeserializeEverySupportedType() {
Assume.assumeFalse(
"No test yet for Hive3 (Date/Timestamp creation)", HiveVersion.min(HiveVersion.HIVE_3));
Assumptions.assumeFalse(HiveVersion.min(HiveVersion.HIVE_3),
"No test yet for Hive3 (Date/Timestamp creation)");

Deserializer deserializer =
new Deserializer.Builder()
@@ -196,9 +196,9 @@ public void testNullDeserialize() {

Record actual = deserializer.deserialize(nulls);

Assert.assertEquals(expected, actual);
Assertions.assertThat(actual).isEqualTo(expected);

// Check null record as well
Assert.assertNull(deserializer.deserialize(null));
Assertions.assertThat(deserializer.deserialize(null)).isNull();
}
}
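
The TestDeserializer change above also shows the one signature difference in the Assume to Assumptions move: JUnit 4's Assume.assumeFalse takes (message, condition), while Jupiter's Assumptions.assumeFalse takes (condition, message). A minimal sketch; the class name and flag are illustrative stand-ins, not code from the commit:

import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.Test;

class AssumptionsSketchTest {

  // Stand-in for HiveVersion.min(HiveVersion.HIVE_3) used in the test above.
  private static final boolean RUNNING_ON_HIVE_3 = false;

  @Test
  void skippedOnHive3() {
    // If the condition is true, the test is aborted and reported as skipped, not failed.
    Assumptions.assumeFalse(RUNNING_ON_HIVE_3, "No test yet for Hive3 (Date/Timestamp creation)");
    // ... the rest of the test body runs only when the assumption holds
  }
}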