Skip to content

Commit

Permalink
Support reading hadoop configuration files (#32)
Browse files Browse the repository at this point in the history
  • Loading branch information
tsreaper authored Sep 14, 2023
1 parent 651adde commit 45e003a
Showing 1 changed file with 57 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,33 @@
package org.apache.paimon.trino;

import org.apache.paimon.options.Options;
import org.apache.paimon.utils.StringUtils;

import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorContext;
import io.trino.spi.connector.ConnectorFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import javax.xml.parsers.DocumentBuilderFactory;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

/** Trino {@link ConnectorFactory}. */
public abstract class TrinoConnectorFactoryBase implements ConnectorFactory {

private static final Logger LOG = LoggerFactory.getLogger(TrinoConnectorFactoryBase.class);

// see https://trino.io/docs/current/connector/hive.html#hive-general-configuration-properties
private static final String HADOOP_CONF_FILES_KEY = "hive.config.resources";
// see org.apache.paimon.utils.HadoopUtils
private static final String HADOOP_CONF_PREFIX = "hadoop.";

@Override
public String getName() {
return "paimon";
Expand All @@ -36,9 +54,48 @@ public String getName() {
/**
 * Creates a Paimon connector for the given catalog.
 *
 * <p>When the {@code hive.config.resources} property is present, each referenced Hadoop XML
 * file is parsed and its properties merged into the catalog options before the connector is
 * constructed. A file that cannot be read is logged and skipped so catalog creation still
 * succeeds.
 */
@Override
public Connector create(
        String catalogName, Map<String, String> config, ConnectorContext context) {
    // Work on a mutable copy; the map handed in by Trino may be unmodifiable.
    Map<String, String> options = new HashMap<>(config);
    if (options.containsKey(HADOOP_CONF_FILES_KEY)) {
        String[] xmlFiles = options.get(HADOOP_CONF_FILES_KEY).split(",");
        for (String xmlFile : xmlFiles) {
            try {
                readHadoopXml(xmlFile, options);
            } catch (Exception e) {
                LOG.warn(
                        "Failed to read hadoop xml file " + xmlFile + ", skipping this file.",
                        e);
            }
        }
    }

    return new TrinoConnector(
            new TrinoMetadata(Options.fromMap(options)),
            new TrinoSplitManager(),
            new TrinoPageSourceProvider());
}

/**
 * Reads all {@code <property>} entries from a Hadoop-style XML configuration file and merges
 * them into {@code config}, prefixing each key with {@code hadoop.}.
 *
 * <p>Existing entries win: values already present in {@code config} are never overwritten, so
 * explicit catalog properties take precedence over file-based ones. Entries with a blank
 * {@code <value>} or with a missing {@code <name>}/{@code <value>} child are skipped.
 *
 * @param path path to the XML file; surrounding whitespace is trimmed, empty paths are ignored
 * @param config mutable configuration map that parsed properties are merged into
 * @throws Exception if the file cannot be read or parsed as XML
 */
private void readHadoopXml(String path, Map<String, String> config) throws Exception {
    path = path.trim();
    if (path.isEmpty()) {
        return;
    }

    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    // Harden against XXE: Hadoop configuration files never need DOCTYPE declarations or
    // external entities, so reject them outright before parsing user-supplied paths.
    factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    factory.setXIncludeAware(false);
    factory.setExpandEntityReferences(false);

    File xmlFile = new File(path);
    NodeList propertyNodes =
            factory.newDocumentBuilder().parse(xmlFile).getElementsByTagName("property");
    for (int i = 0; i < propertyNodes.getLength(); i++) {
        Node propertyNode = propertyNodes.item(i);
        if (propertyNode.getNodeType() != Node.ELEMENT_NODE) {
            continue;
        }
        Element propertyElement = (Element) propertyNode;
        Node nameNode = propertyElement.getElementsByTagName("name").item(0);
        Node valueNode = propertyElement.getElementsByTagName("value").item(0);
        // Guard against malformed <property> entries; item(0) is null when the child is
        // absent, which previously caused a NullPointerException.
        if (nameNode == null || valueNode == null) {
            continue;
        }
        String key = nameNode.getTextContent();
        String value = valueNode.getTextContent();
        if (!StringUtils.isNullOrWhitespaceOnly(value)) {
            config.putIfAbsent(HADOOP_CONF_PREFIX + key, value);
        }
    }
}
}

0 comments on commit 45e003a

Please sign in to comment.