[apache#2541] feat(spark-connector): support DDL, read and write operations to Iceberg catalog
Showing 4 changed files with 92 additions and 0 deletions. Three of the four file diffs are shown below.
53 changes: 53 additions & 0 deletions
...nector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/IcebergAdaptor.java
@@ -0,0 +1,53 @@
package com.datastrato.gravitino.spark.connector.iceberg;

import com.datastrato.gravitino.rel.Table;
import com.datastrato.gravitino.spark.connector.GravitinoCatalogAdaptor;
import com.datastrato.gravitino.spark.connector.GravitinoSparkConfig;
import com.datastrato.gravitino.spark.connector.PropertiesConverter;
import com.datastrato.gravitino.spark.connector.table.SparkBaseTable;
import com.google.common.base.Preconditions;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.iceberg.spark.SparkCatalog;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

/** IcebergAdaptor provides specific operations for Iceberg Catalog to adapt to GravitinoCatalog. */
public class IcebergAdaptor implements GravitinoCatalogAdaptor {

  @Override
  public PropertiesConverter getPropertiesConverter() {
    return new IcebergPropertiesConverter();
  }

  @Override
  public SparkBaseTable createSparkTable(
      Identifier identifier,
      Table gravitinoTable,
      TableCatalog sparkCatalog,
      PropertiesConverter propertiesConverter) {
    return new SparkIcebergTable(identifier, gravitinoTable, sparkCatalog, propertiesConverter);
  }

  @Override
  public TableCatalog createAndInitSparkCatalog(
      String name, CaseInsensitiveStringMap options, Map<String, String> properties) {
    Preconditions.checkArgument(
        properties != null, "Iceberg Catalog properties should not be null");
    String metastoreUri = properties.get(GravitinoSparkConfig.GRAVITINO_HIVE_METASTORE_URI);
    Preconditions.checkArgument(
        StringUtils.isNotBlank(metastoreUri),
        "Couldn't get "
            + GravitinoSparkConfig.GRAVITINO_HIVE_METASTORE_URI
            + " from iceberg catalog properties");

    TableCatalog icebergCatalog = new SparkCatalog();
    HashMap<String, String> all = new HashMap<>(options);
    all.put(GravitinoSparkConfig.SPARK_HIVE_METASTORE_URI, metastoreUri);
    icebergCatalog.initialize(name, new CaseInsensitiveStringMap(all));

    return icebergCatalog;
  }
}
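
To illustrate how the pieces above fit together, here is a rough usage sketch that builds the catalog properties by hand and calls createAndInitSparkCatalog directly. It is a sketch only: the catalog name, the metastore URI, and the local SparkSession are placeholder assumptions, and in the connector these steps are normally driven internally with properties loaded from Gravitino rather than invoked by hand.

```java
// Hypothetical sketch: exercising IcebergAdaptor outside the connector's normal code path.
// The catalog name and metastore URI below are placeholders, not values from this commit.
import com.datastrato.gravitino.spark.connector.GravitinoSparkConfig;
import com.datastrato.gravitino.spark.connector.iceberg.IcebergAdaptor;
import java.util.HashMap;
import java.util.Map;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

public class IcebergAdaptorSketch {
  public static void main(String[] args) {
    // SparkCatalog.initialize expects an active SparkSession for Hadoop configuration.
    SparkSession spark =
        SparkSession.builder().master("local[*]").appName("iceberg-adaptor-sketch").getOrCreate();

    // Catalog-level properties as they would arrive from the Gravitino Iceberg catalog.
    Map<String, String> gravitinoProperties = new HashMap<>();
    gravitinoProperties.put(
        GravitinoSparkConfig.GRAVITINO_HIVE_METASTORE_URI, "thrift://localhost:9083");

    // Extra Spark-side options; empty here for simplicity.
    CaseInsensitiveStringMap options = new CaseInsensitiveStringMap(new HashMap<>());

    // The adaptor copies the metastore URI into the options and initializes
    // an org.apache.iceberg.spark.SparkCatalog under the given name.
    TableCatalog icebergCatalog =
        new IcebergAdaptor()
            .createAndInitSparkCatalog("iceberg_catalog", options, gravitinoProperties);
    System.out.println("Initialized Spark catalog: " + icebergCatalog.name());

    spark.stop();
  }
}
```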
18 changes: 18 additions & 0 deletions
...ain/java/com/datastrato/gravitino/spark/connector/iceberg/IcebergPropertiesConverter.java
@@ -0,0 +1,18 @@
package com.datastrato.gravitino.spark.connector.iceberg;

import com.datastrato.gravitino.spark.connector.PropertiesConverter;
import java.util.HashMap;
import java.util.Map;

/** Transform iceberg catalog properties between Spark and Gravitino. */
public class IcebergPropertiesConverter implements PropertiesConverter {
  @Override
  public Map<String, String> toGravitinoTableProperties(Map<String, String> properties) {
    return new HashMap<>(properties);
  }

  @Override
  public Map<String, String> toSparkTableProperties(Map<String, String> properties) {
    return new HashMap<>(properties);
  }
}
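
Both directions here are a plain defensive copy, so Iceberg table properties pass through Gravitino without any key remapping. A minimal sketch of what that contract looks like to a caller (the property key is just an example, not something this commit defines):

```java
// Hypothetical illustration of the current pass-through behavior of IcebergPropertiesConverter.
import com.datastrato.gravitino.spark.connector.iceberg.IcebergPropertiesConverter;
import java.util.HashMap;
import java.util.Map;

public class ConverterSketch {
  public static void main(String[] args) {
    IcebergPropertiesConverter converter = new IcebergPropertiesConverter();

    Map<String, String> sparkProps = new HashMap<>();
    sparkProps.put("write.format.default", "parquet");

    // Both conversions return an equal copy; no keys are renamed or dropped today.
    Map<String, String> gravitinoProps = converter.toGravitinoTableProperties(sparkProps);
    Map<String, String> roundTripped = converter.toSparkTableProperties(gravitinoProps);
    System.out.println(roundTripped.equals(sparkProps)); // prints true
  }
}
```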
18 changes: 18 additions & 0 deletions
...tor/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/SparkIcebergTable.java
@@ -0,0 +1,18 @@
package com.datastrato.gravitino.spark.connector.iceberg;

import com.datastrato.gravitino.rel.Table;
import com.datastrato.gravitino.spark.connector.PropertiesConverter;
import com.datastrato.gravitino.spark.connector.table.SparkBaseTable;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.TableCatalog;

public class SparkIcebergTable extends SparkBaseTable {

  public SparkIcebergTable(
      Identifier identifier,
      Table gravitinoTable,
      TableCatalog sparkCatalog,
      PropertiesConverter propertiesConverter) {
    super(identifier, gravitinoTable, sparkCatalog, propertiesConverter);
  }
}
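
Taken together, the adaptor, the properties converter, and the table wrapper are what let the commit's headline features (DDL, read, and write against an Iceberg catalog) run through Spark SQL. The sketch below assumes the Gravitino-managed Iceberg catalog has already been registered with the session under the hypothetical name iceberg_catalog; the registration mechanism, catalog name, namespace, and table are placeholders, not part of this diff.

```java
// Hypothetical end-to-end sketch; "iceberg_catalog", the namespace, and the table are placeholders.
import org.apache.spark.sql.SparkSession;

public class IcebergSqlSketch {
  public static void main(String[] args) {
    SparkSession spark =
        SparkSession.builder().appName("gravitino-iceberg-sql-sketch").getOrCreate();

    // DDL: create a namespace and an Iceberg table through the Gravitino-backed catalog.
    spark.sql("CREATE NAMESPACE IF NOT EXISTS iceberg_catalog.db");
    spark.sql(
        "CREATE TABLE IF NOT EXISTS iceberg_catalog.db.events (id BIGINT, msg STRING) USING iceberg");

    // Write: insert a row into the table.
    spark.sql("INSERT INTO iceberg_catalog.db.events VALUES (1, 'hello')");

    // Read: query the data back.
    spark.sql("SELECT * FROM iceberg_catalog.db.events").show();

    spark.stop();
  }
}
```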