Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[AMS-Refactor] Use ArcticSparkSessionCatalog for terminal #1442

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,11 @@

package com.netease.arctic.server.terminal;

import com.netease.arctic.server.catalog.CatalogType;
import com.netease.arctic.server.utils.Configurations;
import com.netease.arctic.spark.ArcticSparkCatalog;
import com.netease.arctic.spark.ArcticSparkExtensions;
import com.netease.arctic.spark.ArcticSparkSessionCatalog;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.spark.SparkCatalog;
import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions;
Expand Down Expand Up @@ -50,8 +52,18 @@ public static Map<String, String> getSparkConf(Configurations sessionConfig) {
sparkConf.put("spark.sql.catalog." + catalog + "." + key, property);
}
} else {
sparkConf.put("spark.sql.catalog." + catalog, ArcticSparkCatalog.class.getName());
sparkConf.put("spark.sql.catalog." + catalog + ".url", catalogUrlBase + catalog);
String sparkCatalogPrefix = "spark.sql.catalog." + catalog;
String catalogClassName = ArcticSparkCatalog.class.getName();
String type =
sessionConfig.get(TerminalSessionFactory.SessionConfigOptions.catalogProperty(catalog, "type"));
if (sessionConfig.getBoolean(
TerminalSessionFactory.SessionConfigOptions.USING_SESSION_CATALOG_FOR_HIVE) &&
CatalogType.HIVE.name().equalsIgnoreCase(type)) {
sparkCatalogPrefix = "spark.sql.catalog.spark_catalog";
catalogClassName = ArcticSparkSessionCatalog.class.getName();
}
sparkConf.put(sparkCatalogPrefix, catalogClassName);
sparkConf.put(sparkCatalogPrefix + ".url", catalogUrlBase + catalog);
}
}
return sparkConf;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@

package com.netease.arctic.server.terminal;

import com.netease.arctic.server.catalog.CatalogType;

import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -84,4 +86,14 @@ default boolean empty() {
* close session and release resources.
*/
void release();

static boolean canUseSparkSessionCatalog(Map<String, String> sessionConf, String catalog) {
String usingSessionCatalogForHiveKey =
TerminalSessionFactory.SessionConfigOptions.USING_SESSION_CATALOG_FOR_HIVE.key();
String usingSessionCatalogForHive =
sessionConf.getOrDefault(usingSessionCatalogForHiveKey, "false");
String type =
sessionConf.get(TerminalSessionFactory.SessionConfigOptions.catalogProperty(catalog, "type"));
return usingSessionCatalogForHive.equals("true") && CatalogType.HIVE.name().equalsIgnoreCase(type);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,11 @@ public static ConfigOption<String> catalogConnector(String catalog) {
.noDefaultValue();
}

public static ConfigOption<Boolean> USING_SESSION_CATALOG_FOR_HIVE = ConfigOptions
.key("using-session-catalog-for-hive")
.booleanType()
.defaultValue(false);

public static ConfigOption<String> catalogProperty(String catalog, String propertyKey) {
return ConfigOptions.key("catalog." + catalog + "." + propertyKey)
.stringType()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,17 @@ public Map<String, String> configs() {
@Override
public ResultSet executeStatement(String catalog, String statement) {
if (currentCatalog == null || !currentCatalog.equalsIgnoreCase(catalog)) {
logs.add("current catalog is " + currentCatalog + ", switch to " + catalog + " before execution");
execute("use `" + catalog + "`");
if (TerminalSession.canUseSparkSessionCatalog(sessionConf, catalog)) {
logs.add(String.format("current catalog is %s, " +
"since it's a hive type catalog and can use spark session catalog, " +
"switch to spark_catalog before execution",
currentCatalog));
execute("use `spark_catalog`");
} else {
logs.add(String.format("current catalog is %s, switch to %s before execution",
currentCatalog, catalog));
execute("use `" + catalog + "`");
}
this.currentCatalog = catalog;
}
java.sql.ResultSet rs = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package com.netease.arctic.server.terminal.kyuubi;

import com.clearspring.analytics.util.Lists;
import com.google.common.collect.Maps;
import com.netease.arctic.server.terminal.SparkContextUtil;
import com.netease.arctic.server.terminal.TerminalSession;
import com.netease.arctic.server.terminal.TerminalSessionFactory;
Expand Down Expand Up @@ -111,7 +110,7 @@ public TerminalSession create(TableMetaStore metaStore, Configurations configura
logMessage(logs, "try to create a kyuubi connection via url: " + kyuubiJdbcUrl);
logMessage(logs, "");

Map<String, String> sessionConf = Maps.newLinkedHashMap();
Map<String, String> sessionConf = configuration.toMap();
sessionConf.put("jdbc.url", kyuubiJdbcUrl);
Properties properties = new Properties();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import com.netease.arctic.table.TableMetaStore;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions;
import org.apache.spark.SparkConf;
Expand Down Expand Up @@ -65,7 +64,7 @@ public TerminalSession create(TableMetaStore metaStore, Configurations configura
Map<String, String> sparkConf = SparkContextUtil.getSparkConf(configuration);
sparkConf.put(com.netease.arctic.spark.SparkSQLProperties.REFRESH_CATALOG_BEFORE_USAGE, "true");

Map<String, String> finallyConf = Maps.newLinkedHashMap();
Map<String, String> finallyConf = configuration.toMap();
catalogs.stream()
.filter(c -> isIcebergCatalog(c, configuration))
.forEach(c -> setHadoopConfigToSparkSession(c, session, metaStore));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,17 @@ public Map<String, String> configs() {
@Override
public ResultSet executeStatement(String catalog, String statement) {
if (currentCatalog == null || !currentCatalog.equalsIgnoreCase(catalog)) {
session.sql("use `" + catalog + "`");
if (TerminalSession.canUseSparkSessionCatalog(sessionConfigs, catalog)) {
session.sql("use `spark_catalog`");
logs.add(String.format("current catalog is %s, " +
"since it's a hive type catalog and can use spark session catalog, " +
"switch to spark_catalog before execution",
currentCatalog));
} else {
session.sql("use `" + catalog + "`");
logs.add("switch to new catalog via: use " + catalog);
}
currentCatalog = catalog;
logs.add("switch to new catalog via: use " + catalog);
}

Dataset<Row> ds = session.sql(statement);
Expand Down