
[AMORO-2113] Apply spotless-maven-plugin in trino module #2131

Merged
merged 13 commits on Oct 20, 2023
1 change: 0 additions & 1 deletion .github/workflows/core-hadoop2-ci.yml
@@ -23,7 +23,6 @@ on:
- "flink/**"
- "hive/**"
- "spark/**"
- "trino/**"
- "pom.xml"

jobs:
1 change: 0 additions & 1 deletion .github/workflows/core-hadoop3-ci.yml
@@ -23,7 +23,6 @@ on:
- "flink/**"
- "hive/**"
- "spark/**"
- "trino/**"
- "pom.xml"

jobs:
8 changes: 3 additions & 5 deletions .github/workflows/trino-ci.yml
@@ -20,9 +20,7 @@ on:
paths:
- "ams/**"
- "core/**"
- "flink/**"
- "hive/**"
- "spark/**"
- "trino/**"
- "pom.xml"

@@ -39,13 +37,13 @@ jobs:
cache: maven

- name: Validate checkstyle first
run: mvn validate
run: mvn validate -P trino-spotless

- name: Install dependency with Maven
run: mvn clean install -DskipTests -pl 'ams/api,core,hive' -Dhadoop=v2 -am -B
run: mvn clean install -DskipTests -pl 'ams/api,core,hive' -Dhadoop=v2 -am -B -P trino-spotless

- name: Build with Maven
run: mvn clean test -pl 'trino' -B -ntp -Dhadoop=v2
run: mvn clean test -pl 'trino' -B -ntp -Dhadoop=v2 -P trino-spotless

- name: Code coverage
uses: codecov/codecov-action@v3
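
Taken together, the updated `trino-ci` steps can be reproduced locally. A minimal sketch, assuming a Java 17 toolchain and a checkout of the repository root (the commands simply mirror the workflow steps above):

```bash
# Style validation first, with the new profile that un-skips spotless in the trino module.
mvn validate -P trino-spotless

# Install the modules trino depends on, then run the trino tests, both with the profile active.
mvn clean install -DskipTests -pl 'ams/api,core,hive' -Dhadoop=v2 -am -B -P trino-spotless
mvn clean test -pl 'trino' -B -ntp -Dhadoop=v2 -P trino-spotless
```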
6 changes: 5 additions & 1 deletion README.md
@@ -135,8 +135,12 @@ Amoro is built using Maven with Java 1.8 and Java 17(only for `trino` module).
* To package without trino module and JAVA 17 dependency: `mvn clean package -DskipTests -pl '!trino'`
* To build with hadoop 2.x(the default is 3.x) `mvn clean package -DskipTests -Dhadoop=v2`
* To indicate flink version for optimizer(the default is 1.14, 1.15 and 1.16 are available)
`mvn clean package -DskipTests -Doptimizer.flink=1.15`
`mvn clean package -DskipTests -Doptimizer.flink=1.15`

>Spotless is skipped by default in the `trino` module, so if you want to run the style checks when building the `trino` module, you must use a Java 17 environment.

* To invoke a build include `trino` module in Java 17 environment: `mvn clean package -DskipTests -P trino-spotless`
* To only build `trino` and its dependent modules in Java 17 environment: `mvn clean package -DskipTests -P trino-spotless -pl 'trino' -am`
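
If the style check reports violations, the plugin's standard goals can be used to inspect and fix them. A sketch, assuming a Java 17 environment and the `trino-spotless` profile introduced by this change (no goal bindings beyond `check` and `apply` are assumed):

```bash
# Report google-java-format violations in the trino module without changing files.
mvn spotless:check -P trino-spotless -pl 'trino'

# Rewrite the trino sources in place to match the configured format.
mvn spotless:apply -P trino-spotless -pl 'trino'
```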
## Quickstart

Visit [https://amoro.netease.com/quick-demo/](https://amoro.netease.com/quick-demo/) to quickly
35 changes: 35 additions & 0 deletions trino/pom.xml
@@ -594,6 +594,27 @@
<skip>true</skip>
</configuration>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>

<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<configuration>
<skip>true</skip>
<java>
<googleJavaFormat>
<version>1.15.0</version>
</googleJavaFormat>
</java>
</configuration>
</plugin>
</plugins>
<sourceDirectory>src/main/java</sourceDirectory>
</build>
@@ -626,5 +647,19 @@
</plugins>
</build>
</profile>
<profile>
<id>trino-spotless</id>
<build>
<plugins>
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<configuration>
<skip>false</skip>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
11 changes: 4 additions & 7 deletions trino/src/main/java/com/netease/arctic/ArcticErrorCode.java
@@ -18,17 +18,14 @@

package com.netease.arctic;

import static io.trino.spi.ErrorType.EXTERNAL;

import io.trino.spi.ErrorCode;
import io.trino.spi.ErrorCodeSupplier;
import io.trino.spi.ErrorType;

import static io.trino.spi.ErrorType.EXTERNAL;

/**
* Error code
*/
public enum ArcticErrorCode
implements ErrorCodeSupplier {
/** Error code */
public enum ArcticErrorCode implements ErrorCodeSupplier {
ARCTIC_BAD_DATA(4, EXTERNAL);

private final ErrorCode errorCode;
@@ -21,14 +21,10 @@
import com.netease.arctic.catalog.ArcticCatalog;
import com.netease.arctic.table.TableMetaStore;

/**
* A interface of factory to generate ArcticCatalog
*/
/** A interface of factory to generate ArcticCatalog */
public interface ArcticCatalogFactory {

/**
* generate ArcticCatalog
*/
/** generate ArcticCatalog */
ArcticCatalog getArcticCatalog();

TableMetaStore getTableMetastore();
@@ -61,12 +61,14 @@
import org.apache.iceberg.io.LocationProvider;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.StructLikeMap;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
* A wrapper of {@link ArcticCatalog} to resolve sub table, such as "tableName#change","tableName#base"
* A wrapper of {@link ArcticCatalog} to resolve sub table, such as
* "tableName#change","tableName#base"
*/
public class ArcticCatalogSupportTableSuffix implements ArcticCatalog {

@@ -82,8 +84,7 @@ public String name() {
}

@Override
public void initialize(
AmsClient client, CatalogMeta meta, Map<String, String> properties) {
public void initialize(AmsClient client, CatalogMeta meta, Map<String, String> properties) {
arcticCatalog.initialize(client, meta, properties);
}

@@ -111,12 +112,18 @@ public List<TableIdentifier> listTables(String database) {
public ArcticTable loadTable(TableIdentifier tableIdentifier) {
TableNameResolve tableNameResolve = new TableNameResolve(tableIdentifier.getTableName());
if (tableNameResolve.withSuffix()) {
TableIdentifier newTableIdentifier = TableIdentifier.of(tableIdentifier.getCatalog(),
tableIdentifier.getDatabase(), tableNameResolve.getTableName());
TableIdentifier newTableIdentifier =
TableIdentifier.of(
tableIdentifier.getCatalog(),
tableIdentifier.getDatabase(),
tableNameResolve.getTableName());
ArcticTable arcticTable = arcticCatalog.loadTable(newTableIdentifier);
if (arcticTable.isUnkeyedTable()) {
throw new IllegalArgumentException("table " + newTableIdentifier + " is not keyed table can not use " +
"change or base suffix");
throw new IllegalArgumentException(
"table "
+ newTableIdentifier
+ " is not keyed table can not use "
+ "change or base suffix");
}
KeyedTable keyedTable = arcticTable.asKeyedTable();
if (tableNameResolve.isBase()) {
@@ -139,8 +146,7 @@ public boolean dropTable(TableIdentifier tableIdentifier, boolean purge) {
}

@Override
public TableBuilder newTableBuilder(
TableIdentifier identifier, Schema schema) {
public TableBuilder newTableBuilder(TableIdentifier identifier, Schema schema) {
return arcticCatalog.newTableBuilder(identifier, schema);
}

@@ -1,4 +1,3 @@

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -22,9 +21,7 @@
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;

/**
* Arctic config
*/
/** Arctic config */
public class ArcticConfig {
private String catalogUrl;
private boolean hdfsImpersonationEnabled;
48 changes: 26 additions & 22 deletions trino/src/main/java/com/netease/arctic/trino/ArcticConnector.java
@@ -1,4 +1,3 @@

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -19,6 +18,13 @@

package com.netease.arctic.trino;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Sets.immutableEnumSet;
import static io.trino.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT;
import static io.trino.spi.transaction.IsolationLevel.SERIALIZABLE;
import static io.trino.spi.transaction.IsolationLevel.checkConnectorSupports;
import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.bootstrap.LifeCycleManager;
@@ -43,20 +49,12 @@
import org.slf4j.LoggerFactory;

import javax.inject.Inject;

import java.util.List;
import java.util.Optional;
import java.util.Set;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Sets.immutableEnumSet;
import static io.trino.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT;
import static io.trino.spi.transaction.IsolationLevel.SERIALIZABLE;
import static io.trino.spi.transaction.IsolationLevel.checkConnectorSupports;
import static java.util.Objects.requireNonNull;

/**
* A Connector of arctic to Trino
*/
/** A Connector of arctic to Trino */
public class ArcticConnector implements Connector {

private static final Logger log = LoggerFactory.getLogger(ArcticConnector.class);
@@ -93,15 +91,22 @@ public ArcticConnector(
this.splitManager = requireNonNull(splitManager, "splitManager is null");
this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
this.pageSinkProvider = requireNonNull(pageSinkProvider, "pageSinkProvider is null");
this.nodePartitioningProvider = requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null");
this.sessionProperties = requireNonNull(sessionPropertiesProviders, "sessionPropertiesProviders is null").stream()
.flatMap(sessionPropertiesProvider -> sessionPropertiesProvider.getSessionProperties().stream())
.collect(toImmutableList());
this.schemaProperties = ImmutableList.copyOf(requireNonNull(schemaProperties, "schemaProperties is null"));
this.tableProperties = ImmutableList.copyOf(requireNonNull(tableProperties, "tableProperties is null"));
this.nodePartitioningProvider =
requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null");
this.sessionProperties =
requireNonNull(sessionPropertiesProviders, "sessionPropertiesProviders is null").stream()
.flatMap(
sessionPropertiesProvider ->
sessionPropertiesProvider.getSessionProperties().stream())
.collect(toImmutableList());
this.schemaProperties =
ImmutableList.copyOf(requireNonNull(schemaProperties, "schemaProperties is null"));
this.tableProperties =
ImmutableList.copyOf(requireNonNull(tableProperties, "tableProperties is null"));
this.accessControl = requireNonNull(accessControl, "accessControl is null");
this.procedures = ImmutableSet.copyOf(requireNonNull(procedures, "procedures is null"));
this.tableProcedures = ImmutableSet.copyOf(requireNonNull(tableProcedures, "tableProcedures is null"));
this.tableProcedures =
ImmutableSet.copyOf(requireNonNull(tableProcedures, "tableProcedures is null"));
}

@Override
@@ -110,7 +115,8 @@ public Set<ConnectorCapabilities> getCapabilities() {
}

@Override
public ConnectorMetadata getMetadata(ConnectorSession session, ConnectorTransactionHandle transaction) {
public ConnectorMetadata getMetadata(
ConnectorSession session, ConnectorTransactionHandle transaction) {
ConnectorMetadata metadata = transactionManager.get(transaction);
return new ClassLoaderSafeConnectorMetadata(metadata, getClass().getClassLoader());
}
@@ -172,9 +178,7 @@ public ConnectorAccessControl getAccessControl() {

@Override
public ConnectorTransactionHandle beginTransaction(
IsolationLevel isolationLevel,
boolean readOnly,
boolean autoCommit) {
IsolationLevel isolationLevel, boolean readOnly, boolean autoCommit) {
checkConnectorSupports(SERIALIZABLE, isolationLevel);
ConnectorTransactionHandle transaction = new HiveTransactionHandle(autoCommit);
transactionManager.begin(transaction);