Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/stage.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ fluss-lakehouse,\
fluss-lakehouse/fluss-lakehouse-cli,\
fluss-lakehouse/fluss-lakehouse-paimon,\
fluss-lake,\
fluss-lake/fluss-lake-paimon
"

function get_test_modules_for_stage() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
/*
* Copyright (c) 2025 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.alibaba.fluss.lakehouse.lakestorage;

import com.alibaba.fluss.annotation.PublicEvolving;
import com.alibaba.fluss.exception.TableAlreadyExistException;
import com.alibaba.fluss.metadata.TableDescriptor;
import com.alibaba.fluss.metadata.TablePath;

/**
 * A catalog interface to modify metadata (e.g. tables) in an external datalake. Implementations
 * are specific to one lake format and are obtained from the corresponding lake storage.
 *
 * @since 0.7
 */
@PublicEvolving
public interface LakeCatalog extends AutoCloseable {

    /**
     * Create a new table in lake.
     *
     * @param tablePath path of the table to be created
     * @param tableDescriptor The descriptor of the table to be created
     * @throws TableAlreadyExistException if the table already exists
     */
    void createTable(TablePath tablePath, TableDescriptor tableDescriptor)
            throws TableAlreadyExistException;

    @Override
    default void close() throws Exception {
        // default do nothing; implementations holding resources (connections, clients)
        // should override this to release them
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,7 @@ public interface LakeStorage {
* @return the lake tiering factory
*/
LakeTieringFactory createLakeTieringFactory();

/**
 * Create a lake catalog to modify metadata (e.g. tables) in the lake.
 *
 * @return the lake catalog
 */
LakeCatalog createLakeCatalog();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
/*
* Copyright (c) 2025 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.alibaba.fluss.lakehouse.lakestorage;

import com.alibaba.fluss.config.ConfigOptions;
import com.alibaba.fluss.config.Configuration;
import com.alibaba.fluss.metadata.DataLakeFormat;
import com.alibaba.fluss.plugin.PluginManager;
import com.alibaba.fluss.shaded.guava32.com.google.common.collect.Iterators;

import javax.annotation.Nullable;

import java.util.Iterator;
import java.util.Objects;
import java.util.ServiceLoader;

/**
 * Encapsulates everything needed for the instantiation and configuration of a {@link
 * LakeStoragePlugin}.
 */
public final class LakeStoragePluginSetUp {

    /** Non-instantiable utility class. */
    private LakeStoragePluginSetUp() {}

    /**
     * Returns the {@link LakeStoragePlugin} matching the datalake format configured via {@link
     * ConfigOptions#DATALAKE_FORMAT}, or {@code null} if no datalake format is configured.
     *
     * <p>Plugins are discovered from the given {@link PluginManager} (if non-null) and from the
     * current classpath via {@link ServiceLoader}; plugin-manager plugins are checked first.
     *
     * @param configuration the configuration to read the datalake format from
     * @param pluginManager the plugin manager to discover plugins with, may be null
     * @return the matching plugin, or null when no datalake format is configured
     * @throws UnsupportedOperationException if a datalake format is configured but no plugin with
     *     a matching identifier can be found
     */
    @Nullable
    public static LakeStoragePlugin fromConfiguration(
            final Configuration configuration, @Nullable final PluginManager pluginManager) {
        DataLakeFormat dataLakeFormat = configuration.get(ConfigOptions.DATALAKE_FORMAT);
        if (dataLakeFormat == null) {
            // no datalake configured, nothing to load
            return null;
        }
        String dataLakeIdentifier = dataLakeFormat.toString();
        // now, load lake storage plugins and pick the one whose identifier matches
        // the configured datalake format
        Iterator<LakeStoragePlugin> lakeStoragePluginIterator =
                getAllLakeStoragePlugins(pluginManager);

        while (lakeStoragePluginIterator.hasNext()) {
            LakeStoragePlugin lakeStoragePlugin = lakeStoragePluginIterator.next();
            if (Objects.equals(lakeStoragePlugin.identifier(), dataLakeIdentifier)) {
                return lakeStoragePlugin;
            }
        }

        // if come here, means we haven't found LakeStoragePlugin match the configured
        // datalake, throw exception
        throw new UnsupportedOperationException(
                "No LakeStoragePlugin can be found for datalake format: " + dataLakeIdentifier);
    }

    /**
     * Returns an iterator over all discoverable lake storage plugins: those loaded by the plugin
     * manager (if provided) followed by those found on the current classpath via {@link
     * ServiceLoader}.
     */
    private static Iterator<LakeStoragePlugin> getAllLakeStoragePlugins(
            @Nullable PluginManager pluginManager) {
        final Iterator<LakeStoragePlugin> pluginIteratorSPI =
                ServiceLoader.load(LakeStoragePlugin.class).iterator();
        if (pluginManager == null) {
            return pluginIteratorSPI;
        } else {
            // plugin-manager plugins come first, so they win when identifiers collide
            return Iterators.concat(pluginManager.load(LakeStoragePlugin.class), pluginIteratorSPI);
        }
    }
}
22 changes: 20 additions & 2 deletions fluss-dist/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,26 @@
<scope>provided</scope>
</dependency>

<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-lake-paimon</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-hadoop-2-uber</artifactId>
<version>2.8.3-10.0</version>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>


<!-- Concrete logging framework - we add this only here (and not in the
root POM) to not tie the projects to one specific framework and make
Expand Down Expand Up @@ -118,8 +138,6 @@
<artifactId>log4j-1.2-api</artifactId>
<scope>compile</scope>
</dependency>


</dependencies>


Expand Down
23 changes: 23 additions & 0 deletions fluss-dist/src/main/assemblies/plugins.xml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,21 @@
<includeBaseDirectory>true</includeBaseDirectory>
<baseDirectory>fluss-${project.version}</baseDirectory>

<dependencySets>
<dependencySet>
<outputDirectory>plugins/paimon</outputDirectory>
<unpack>false</unpack>
<useProjectArtifact>false</useProjectArtifact>
<useProjectAttachments>false</useProjectAttachments>
<useTransitiveDependencies>true</useTransitiveDependencies>
<useTransitiveFiltering>true</useTransitiveFiltering>
<includes>
<include>org.apache.flink:flink-shaded-hadoop-2-uber</include>
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Paimon requires Hadoop to be bundled, so we include it in the Paimon plugin directory.
https://paimon.apache.org/docs/master/flink/quick-start/

</includes>
</dependencySet>

</dependencySets>

<files>
<!-- filesystem -->
<!-- output directory should correspond to the file system *scheme* name, i.e., plugins/<scheme>/ -->
Expand Down Expand Up @@ -63,6 +78,14 @@
<destName>fluss-metrics-jmx-${project.version}.jar</destName>
<fileMode>0644</fileMode>
</file>

<!-- lake formats -->
<file>
<source>../fluss-lake/fluss-lake-paimon/target/fluss-lake-paimon-${project.version}.jar</source>
<outputDirectory>plugins/paimon/</outputDirectory>
<destName>fluss-lake-paimon-${project.version}.jar</destName>
<fileMode>0644</fileMode>
</file>
</files>

</assembly>
158 changes: 158 additions & 0 deletions fluss-lake/fluss-lake-paimon/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright (c) 2025 Alibaba Group Holding Ltd.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-lake</artifactId>
<version>0.7-SNAPSHOT</version>
</parent>

<artifactId>fluss-lake-paimon</artifactId>
<name>Fluss : Lake : Paimon</name>

<packaging>jar</packaging>

<properties>
<paimon.version>1.0.1</paimon.version>
</properties>

<dependencies>
<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-common</artifactId>
<version>${project.version}</version>
</dependency>

<dependency>
<groupId>org.apache.paimon</groupId>
<artifactId>paimon-bundle</artifactId>
<version>${paimon.version}</version>
</dependency>

<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-client</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-test-utils</artifactId>
</dependency>

<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<version>${curator.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-server</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<version>${fluss.hadoop.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>avro</artifactId>
<groupId>org.apache.avro</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
</exclusion>
<exclusion>
<artifactId>jdk.tools</artifactId>
<groupId>jdk.tools</groupId>
</exclusion>
<exclusion>
<artifactId>protobuf-java</artifactId>
<groupId>com.google.protobuf</groupId>
</exclusion>
<exclusion>
<artifactId>commons-io</artifactId>
<groupId>commons-io</groupId>
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>com.alibaba.fluss</groupId>
<artifactId>fluss-server</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>


<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<id>shade-fluss</id>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<includes>
<include>org.apache.paimon:paimon-bundle</include>
</includes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>


</project>
Loading