diff --git a/packaging/pom.xml b/packaging/pom.xml index cfc37f087f90..f7c6ac7aee96 100644 --- a/packaging/pom.xml +++ b/packaging/pom.xml @@ -424,6 +424,11 @@ hive-webhcat-java-client ${project.version} + + org.apache.hive + hive-standalone-metastore-rest-catalog + ${project.version} + org.apache.hadoop hadoop-hdfs-client diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml index 18b80a6d64a2..9cdbed13776e 100644 --- a/packaging/src/main/assembly/src.xml +++ b/packaging/src/main/assembly/src.xml @@ -105,6 +105,7 @@ standalone-metastore/metastore-common/**/* standalone-metastore/metastore-server/**/* standalone-metastore/metastore-tools/**/* + standalone-metastore/metastore-rest-catalog/**/* standalone-metastore/src/assembly/src.xml standalone-metastore/pom.xml streaming/**/* diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index facbd7b77c81..4d9e7b31ec21 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -304,8 +304,7 @@ static ThriftHiveMetastore.Iface callEmbeddedMetastore(Configuration conf) throw try { Class clazz = Class.forName(HIVE_METASTORE_CLASS); //noinspection JavaReflectionMemberAccess - Method method = clazz.getDeclaredMethod(HIVE_METASTORE_CREATE_HANDLER_METHOD, - Configuration.class); + Method method = clazz.getDeclaredMethod(HIVE_METASTORE_CREATE_HANDLER_METHOD, Configuration.class); method.setAccessible(true); return (ThriftHiveMetastore.Iface) method.invoke(null, conf); } catch (InvocationTargetException e) { diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index c5e5fc3fd753..e0571664193c 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -1823,8 +1823,46 @@ public enum ConfVars { ), PROPERTIES_SERVLET_AUTH("hive.metastore.properties.servlet.auth", "hive.metastore.properties.servlet.auth", "jwt", + new StringSetValidator("simple", "jwt"), "Property-maps servlet authentication method (simple or jwt)." ), + ICEBERG_CATALOG_SERVLET_FACTORY("hive.metastore.catalog.servlet.factory", + "hive.metastore.catalog.servlet.factory", + "org.apache.iceberg.rest.HMSCatalogFactory", + "HMS Iceberg Catalog servlet factory class name." + + "The factory needs to expose a method: " + + "public static HttpServlet createServlet(Configuration configuration);" + ), + ICEBERG_CATALOG_SERVLET_PATH("hive.metastore.catalog.servlet.path", + "hive.metastore.catalog.servlet.path", "iceberg", + "HMS Iceberg Catalog servlet path component of URL endpoint." + ), + ICEBERG_CATALOG_SERVLET_PORT("hive.metastore.catalog.servlet.port", + "hive.metastore.catalog.servlet.port", -1, + "HMS Iceberg Catalog servlet server port. Negative value disables the servlet," + + " 0 will let the system determine the catalog server port," + + " positive value will be used as-is." 
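A minimal sketch of setting these servlet properties from code, using the MetastoreConf accessors this patch itself uses elsewhere (the chosen values are illustrative):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class CatalogServletConfigSketch {
  public static void main(String[] args) {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Negative disables the servlet, 0 lets the system pick a free port, positive is used as-is.
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT, 0);
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH, "iceberg");
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_AUTH, "simple");
    System.out.println("port setting: "
        + MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT));
  }
}
```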
+ ), + ICEBERG_CATALOG_SERVLET_AUTH("hive.metastore.catalog.servlet.auth", + "hive.metastore.catalog.servlet.auth", "jwt", + "HMS Iceberg Catalog servlet authentication method (simple or jwt)." + ), + ICEBERG_CATALOG_CACHE_EXPIRY("hive.metastore.catalog.cache.expiry", + "hive.metastore.catalog.cache.expiry", 60_000L, + "HMS Iceberg Catalog cache expiry." + ), + HTTPSERVER_THREADPOOL_MIN("hive.metastore.httpserver.threadpool.min", + "hive.metastore.httpserver.threadpool.min", 8, + "HMS embedded HTTP server minimum number of threads." + ), + HTTPSERVER_THREADPOOL_MAX("hive.metastore.httpserver.threadpool.max", + "hive.metastore.httpserver.threadpool.max", 256, + "HMS embedded HTTP server maximum number of threads." + ), + HTTPSERVER_THREADPOOL_IDLE("hive.metastore.httpserver.threadpool.idle", + "hive.metastore.httpserver.threadpool.idle", 60_000L, + "HMS embedded HTTP server thread idle time." + ), // Deprecated Hive values that we are keeping for backwards compatibility. @Deprecated diff --git a/standalone-metastore/metastore-rest-catalog/pom.xml b/standalone-metastore/metastore-rest-catalog/pom.xml new file mode 100644 index 000000000000..3cb1cb9ccee5 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/pom.xml @@ -0,0 +1,258 @@ + + + + + hive-standalone-metastore + org.apache.hive + 4.1.0-SNAPSHOT + + 4.0.0 + hive-standalone-metastore-rest-catalog + Hive Metastore REST Catalog + + .. + 8 + 8 + UTF-8 + false + ${project.parent.version} + 1.6.1 + + + + org.apache.hive + hive-standalone-metastore-server + ${hive.version} + + + org.apache.hive + hive-standalone-metastore-common + ${hive.version} + + + org.apache.hive + hive-iceberg-catalog + ${hive.version} + + + + org.apache.hive + hive-standalone-metastore-common + ${hive.version} + tests + test + + + org.apache.hive + hive-standalone-metastore-server + ${hive.version} + tests + test + + + org.apache.httpcomponents.core5 + httpcore5 + 5.2 + test + + + junit + junit + test + + + com.github.tomakehurst + wiremock-jre8-standalone + ${wiremock.jre8.standalone.version} + test + + + org.junit.jupiter + junit-jupiter-api + ${junit.jupiter.version} + test + + + org.apache.hadoop + hadoop-auth + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + slf4j-reload4j + + + ch.qos.reload4j + reload4j + + + commons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + slf4j-reload4j + + + ch.qos.reload4j + reload4j + + + commons-beanutils + commons-beanutils + + + commons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-hdfs-client + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + slf4j-reload4j + + + ch.qos.reload4j + reload4j + + + commons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + slf4j-reload4j + + + ch.qos.reload4j + reload4j + + + commons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.slf4j + slf4j-reload4j + + + ch.qos.reload4j + reload4j + + + commons-logging + commons-logging + + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + assemble + none + + single + + + + + + org.apache.rat + apache-rat-plugin + + + process-resources + + check + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${surefire.version} + + + org.codehaus.mojo + exec-maven-plugin + 3.1.0 + + + test + + + + 
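The HTTPSERVER_THREADPOOL_* settings above size the embedded HTTP server's thread pool. A hedged sketch of how they would typically feed a Jetty QueuedThreadPool — presumably what ServletServerBuilder does internally; the exact wiring may differ:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.util.thread.QueuedThreadPool;

public class HttpServerPoolSketch {
  public static Server buildServer(Configuration conf) {
    int min = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MIN);
    int max = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MAX);
    long idle = MetastoreConf.getLongVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_IDLE);
    // Jetty's QueuedThreadPool takes (maxThreads, minThreads, idleTimeoutMillis).
    QueuedThreadPool pool = new QueuedThreadPool(max, min, (int) idle);
    return new Server(pool);
  }
}
```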
log4j2.debug + false + + + + + + + + + diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java new file mode 100644 index 000000000000..6b5d76f818a8 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.rest; + +import com.github.benmanes.caffeine.cache.Ticker; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.iceberg.CachingCatalog; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.catalog.Namespace; +import org.apache.iceberg.catalog.SupportsNamespaces; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.exceptions.NamespaceNotEmptyException; +import org.apache.iceberg.exceptions.NoSuchNamespaceException; + + +/** + * Class that wraps an Iceberg Catalog to cache tables. 
+ * @param <CATALOG> the catalog class
+ */
+public class HMSCachingCatalog<CATALOG extends Catalog & SupportsNamespaces> extends CachingCatalog implements SupportsNamespaces {
+  protected final CATALOG nsCatalog;
+
+  public HMSCachingCatalog(CATALOG catalog, long expiration) {
+    super(catalog, true, expiration, Ticker.systemTicker());
+    nsCatalog = catalog;
+  }
+
+  public CATALOG hmsUnwrap() {
+    return nsCatalog;
+  }
+
+  @Override
+  public void createNamespace(Namespace nmspc, Map<String, String> map) {
+    nsCatalog.createNamespace(nmspc, map);
+  }
+
+  @Override
+  public List<Namespace> listNamespaces(Namespace nmspc) throws NoSuchNamespaceException {
+    return nsCatalog.listNamespaces(nmspc);
+  }
+
+  @Override
+  public Map<String, String> loadNamespaceMetadata(Namespace nmspc) throws NoSuchNamespaceException {
+    return nsCatalog.loadNamespaceMetadata(nmspc);
+  }
+
+  @Override
+  public boolean dropNamespace(Namespace nmspc) throws NamespaceNotEmptyException {
+    List<TableIdentifier> tables = listTables(nmspc);
+    for (TableIdentifier ident : tables) {
+      invalidateTable(ident);
+    }
+    return nsCatalog.dropNamespace(nmspc);
+  }
+
+  @Override
+  public boolean setProperties(Namespace nmspc, Map<String, String> map) throws NoSuchNamespaceException {
+    return nsCatalog.setProperties(nmspc, map);
+  }
+
+  @Override
+  public boolean removeProperties(Namespace nmspc, Set<String> set) throws NoSuchNamespaceException {
+    return nsCatalog.removeProperties(nmspc, set);
+  }
+
+}
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
new file mode 100644
index 000000000000..dbf280396f1e
--- /dev/null
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -0,0 +1,762 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
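A usage sketch for the wrapper above, mirroring what HMSCatalogFactory.createCatalog() later in this patch does (catalog name and empty properties are illustrative):

```java
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.rest.HMSCachingCatalog;

public class CachingCatalogSketch {
  public static Catalog open(Configuration conf) {
    HiveCatalog hive = new HiveCatalog();
    hive.setConf(conf);
    hive.initialize("hive", Collections.emptyMap());
    long expiry = MetastoreConf.getLongVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY);
    // Only wrap when a positive expiry is configured, as the factory does.
    return expiry > 0 ? new HMSCachingCatalog<>(hive, expiry) : hive;
  }
}
```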
+ */ + +package org.apache.iceberg.rest; + +import com.codahale.metrics.Counter; +import org.apache.hadoop.hive.metastore.metrics.Metrics; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import org.apache.iceberg.BaseTable; +import org.apache.iceberg.BaseTransaction; +import org.apache.iceberg.Table; +import org.apache.iceberg.Transaction; +import org.apache.iceberg.Transactions; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.catalog.Namespace; +import org.apache.iceberg.catalog.SupportsNamespaces; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.catalog.ViewCatalog; +import org.apache.iceberg.exceptions.AlreadyExistsException; +import org.apache.iceberg.exceptions.CommitFailedException; +import org.apache.iceberg.exceptions.CommitStateUnknownException; +import org.apache.iceberg.exceptions.ForbiddenException; +import org.apache.iceberg.exceptions.NamespaceNotEmptyException; +import org.apache.iceberg.exceptions.NoSuchIcebergTableException; +import org.apache.iceberg.exceptions.NoSuchNamespaceException; +import org.apache.iceberg.exceptions.NoSuchTableException; +import org.apache.iceberg.exceptions.NotAuthorizedException; +import org.apache.iceberg.exceptions.RESTException; +import org.apache.iceberg.exceptions.UnprocessableEntityException; +import org.apache.iceberg.exceptions.ValidationException; +import org.apache.iceberg.relocated.com.google.common.base.Splitter; +import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.rest.requests.CommitTransactionRequest; +import org.apache.iceberg.rest.requests.CreateNamespaceRequest; +import org.apache.iceberg.rest.requests.CreateTableRequest; +import org.apache.iceberg.rest.requests.CreateViewRequest; +import org.apache.iceberg.rest.requests.RegisterTableRequest; +import org.apache.iceberg.rest.requests.RenameTableRequest; +import org.apache.iceberg.rest.requests.ReportMetricsRequest; +import org.apache.iceberg.rest.requests.UpdateNamespacePropertiesRequest; +import org.apache.iceberg.rest.requests.UpdateTableRequest; +import org.apache.iceberg.rest.responses.ConfigResponse; +import org.apache.iceberg.rest.responses.CreateNamespaceResponse; +import org.apache.iceberg.rest.responses.ErrorResponse; +import org.apache.iceberg.rest.responses.GetNamespaceResponse; +import org.apache.iceberg.rest.responses.ListNamespacesResponse; +import org.apache.iceberg.rest.responses.ListTablesResponse; +import org.apache.iceberg.rest.responses.LoadTableResponse; +import org.apache.iceberg.rest.responses.LoadViewResponse; +import org.apache.iceberg.rest.responses.OAuthTokenResponse; +import org.apache.iceberg.rest.responses.UpdateNamespacePropertiesResponse; +import org.apache.iceberg.util.Pair; +import org.apache.iceberg.util.PropertyUtil; + +/** + * Original @ https://github.com/apache/iceberg/blob/1.6.x/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java + * Adaptor class to translate REST requests into {@link Catalog} API calls. + */ +public class HMSCatalogAdapter implements RESTClient { + /** The metric names prefix. 
*/ + static final String HMS_METRIC_PREFIX = "hmscatalog."; + private static final Splitter SLASH = Splitter.on('/'); + + private static final Map, Integer> EXCEPTION_ERROR_CODES = + ImmutableMap., Integer>builder() + .put(NamespaceNotSupported.class, 400) + .put(IllegalArgumentException.class, 400) + .put(ValidationException.class, 400) + .put(NamespaceNotEmptyException.class, 400) + .put(NotAuthorizedException.class, 401) + .put(ForbiddenException.class, 403) + .put(NoSuchNamespaceException.class, 404) + .put(NoSuchTableException.class, 404) + .put(NoSuchIcebergTableException.class, 404) + .put(UnsupportedOperationException.class, 406) + .put(AlreadyExistsException.class, 409) + .put(CommitFailedException.class, 409) + .put(UnprocessableEntityException.class, 422) + .put(CommitStateUnknownException.class, 500) + .buildOrThrow(); + + private static final String URN_OAUTH_TOKEN_EXCHANGE = "urn:ietf:params:oauth:grant-type:token-exchange"; + private static final String URN_OAUTH_ACCESS_TOKEN = "urn:ietf:params:oauth:token-type:access_token"; + private static final String GRANT_TYPE = "grant_type"; + private static final String CLIENT_CREDENTIALS = "client_credentials"; + private static final String BEARER = "Bearer"; + private static final String CLIENT_ID = "client_id"; + private static final String ACTOR_TOKEN = "actor_token"; + private static final String SUBJECT_TOKEN = "subject_token"; + private static final String VIEWS_PATH = "v1/namespaces/{namespace}/views/{name}"; + private static final String TABLES_PATH = "v1/namespaces/{namespace}/tables/{table}"; + + private final Catalog catalog; + private final SupportsNamespaces asNamespaceCatalog; + private final ViewCatalog asViewCatalog; + + + public HMSCatalogAdapter(Catalog catalog) { + this.catalog = catalog; + this.asNamespaceCatalog = + catalog instanceof SupportsNamespaces ? (SupportsNamespaces) catalog : null; + this.asViewCatalog = catalog instanceof ViewCatalog ? 
(ViewCatalog) catalog : null; + } + + enum HTTPMethod { + GET, + HEAD, + POST, + DELETE + } + + enum Route { + TOKENS(HTTPMethod.POST, "v1/oauth/tokens", + null, OAuthTokenResponse.class), + SEPARATE_AUTH_TOKENS_URI(HTTPMethod.POST, "https://auth-server.com/token", + null, OAuthTokenResponse.class), + CONFIG(HTTPMethod.GET, "v1/config", + null, ConfigResponse.class), + LIST_NAMESPACES(HTTPMethod.GET, "v1/namespaces", + null, ListNamespacesResponse.class), + CREATE_NAMESPACE(HTTPMethod.POST, "v1/namespaces", + CreateNamespaceRequest.class, CreateNamespaceResponse.class), + LOAD_NAMESPACE(HTTPMethod.GET, "v1/namespaces/{namespace}", + null, GetNamespaceResponse.class), + DROP_NAMESPACE(HTTPMethod.DELETE, "v1/namespaces/{namespace}"), + UPDATE_NAMESPACE(HTTPMethod.POST, "v1/namespaces/{namespace}/properties", + UpdateNamespacePropertiesRequest.class, UpdateNamespacePropertiesResponse.class), + LIST_TABLES(HTTPMethod.GET, "v1/namespaces/{namespace}/tables", + null, ListTablesResponse.class), + CREATE_TABLE(HTTPMethod.POST, "v1/namespaces/{namespace}/tables", + CreateTableRequest.class, LoadTableResponse.class), + LOAD_TABLE(HTTPMethod.GET, TABLES_PATH, + null, LoadTableResponse.class), + REGISTER_TABLE(HTTPMethod.POST, "v1/namespaces/{namespace}/register", + RegisterTableRequest.class, LoadTableResponse.class), + UPDATE_TABLE(HTTPMethod.POST, TABLES_PATH, + UpdateTableRequest.class, LoadTableResponse.class), + DROP_TABLE(HTTPMethod.DELETE, TABLES_PATH), + RENAME_TABLE(HTTPMethod.POST, "v1/tables/rename", + RenameTableRequest.class, null), + REPORT_METRICS(HTTPMethod.POST, "v1/namespaces/{namespace}/tables/{table}/metrics", + ReportMetricsRequest.class, null), + COMMIT_TRANSACTION(HTTPMethod.POST, "v1/transactions/commit", + CommitTransactionRequest.class, null), + LIST_VIEWS(HTTPMethod.GET, "v1/namespaces/{namespace}/views", + null, ListTablesResponse.class), + LOAD_VIEW(HTTPMethod.GET, VIEWS_PATH, + null, LoadViewResponse.class), + CREATE_VIEW(HTTPMethod.POST, "v1/namespaces/{namespace}/views", + CreateViewRequest.class, LoadViewResponse.class), + UPDATE_VIEW(HTTPMethod.POST, VIEWS_PATH, + UpdateTableRequest.class, LoadViewResponse.class), + RENAME_VIEW(HTTPMethod.POST, "v1/views/rename", + RenameTableRequest.class, null), + DROP_VIEW(HTTPMethod.DELETE, VIEWS_PATH); + + private final HTTPMethod method; + private final int requiredLength; + private final Map requirements; + private final Map variables; + private final Class requestClass; + private final Class responseClass; + + /** + * An exception safe way of getting a route by name. 
+ * + * @param name the route name + * @return the route instance or null if it could not be found + */ + static Route byName(String name) { + try { + return valueOf(name.toUpperCase()); + } catch (IllegalArgumentException xill) { + return null; + } + } + + Route(HTTPMethod method, String pattern) { + this(method, pattern, null, null); + } + + Route(HTTPMethod method, String pattern, + Class requestClass, + Class responseClass + ) { + this.method = method; + // parse the pattern into requirements and variables + List parts = SLASH.splitToList(pattern); + ImmutableMap.Builder requirementsBuilder = ImmutableMap.builder(); + ImmutableMap.Builder variablesBuilder = ImmutableMap.builder(); + for (int pos = 0; pos < parts.size(); pos += 1) { + String part = parts.get(pos); + if (part.startsWith("{") && part.endsWith("}")) { + variablesBuilder.put(pos, part.substring(1, part.length() - 1)); + } else { + requirementsBuilder.put(pos, part); + } + } + this.requestClass = requestClass; + this.responseClass = responseClass; + this.requiredLength = parts.size(); + this.requirements = requirementsBuilder.build(); + this.variables = variablesBuilder.build(); + } + + private boolean matches(HTTPMethod requestMethod, List requestPath) { + return method == requestMethod && + requiredLength == requestPath.size() && + requirements.entrySet().stream() + .allMatch( + requirement -> + requirement + .getValue() + .equalsIgnoreCase(requestPath.get(requirement.getKey()))); + } + + private Map variables(List requestPath) { + ImmutableMap.Builder vars = ImmutableMap.builder(); + variables.forEach((key, value) -> vars.put(value, requestPath.get(key))); + return vars.build(); + } + + public static Pair> from(HTTPMethod method, String path) { + List parts = SLASH.splitToList(path); + for (Route candidate : Route.values()) { + if (candidate.matches(method, parts)) { + return Pair.of(candidate, candidate.variables(parts)); + } + } + + return null; + } + + public Class requestClass() { + return requestClass; + } + + public Class responseClass() { + return responseClass; + } + } + + /** + * @param route a route/api-call name + * @return the metric counter name for the api-call + */ + static String hmsCatalogMetricCount(String route) { + return HMS_METRIC_PREFIX + route.toLowerCase() + ".count"; + } + + /** + * @param apis an optional list of known api call names + * @return the list of metric names for the HMSCatalog class + */ + public static List getMetricNames(String... 
apis) { + final List routes; + if (apis != null && apis.length > 0) { + routes = Arrays.stream(apis) + .map(HMSCatalogAdapter.Route::byName) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } else { + routes = Arrays.asList(HMSCatalogAdapter.Route.values()); + } + final List metricNames = new ArrayList<>(routes.size()); + for (HMSCatalogAdapter.Route route : routes) { + metricNames.add(hmsCatalogMetricCount(route.name())); + } + return metricNames; + } + + private ConfigResponse config() { + return castResponse(ConfigResponse.class, ConfigResponse.builder().build()); + } + + private OAuthTokenResponse tokens(Object body) { + @SuppressWarnings("unchecked") + Map request = (Map) castRequest(Map.class, body); + String grantType = request.get(GRANT_TYPE); + switch (grantType) { + case CLIENT_CREDENTIALS: + return OAuthTokenResponse.builder() + .withToken("client-credentials-token:sub=" + request.get(CLIENT_ID)) + .withIssuedTokenType(URN_OAUTH_ACCESS_TOKEN) + .withTokenType(BEARER) + .build(); + + case URN_OAUTH_TOKEN_EXCHANGE: + String actor = request.get(ACTOR_TOKEN); + String token = + String.format( + "token-exchange-token:sub=%s%s", + request.get(SUBJECT_TOKEN), actor != null ? ",act=" + actor : ""); + return OAuthTokenResponse.builder() + .withToken(token) + .withIssuedTokenType(URN_OAUTH_ACCESS_TOKEN) + .withTokenType(BEARER) + .build(); + + default: + throw new UnsupportedOperationException("Unsupported grant_type: " + grantType); + } + } + + private ListNamespacesResponse listNamespaces(Map vars) { + if (asNamespaceCatalog != null) { + Namespace namespace; + if (vars.containsKey("parent")) { + namespace = Namespace.of(RESTUtil.NAMESPACE_SPLITTER.splitToStream(vars.get("parent")).toArray(String[]::new)); + } else { + namespace = Namespace.empty(); + } + return castResponse(ListNamespacesResponse.class, CatalogHandlers.listNamespaces(asNamespaceCatalog, namespace)); + } + throw new NamespaceNotSupported(catalog.toString()); + } + + private CreateNamespaceResponse createNamespace(Object body) { + if (asNamespaceCatalog != null) { + CreateNamespaceRequest request = castRequest(CreateNamespaceRequest.class, body); + return castResponse( + CreateNamespaceResponse.class, CatalogHandlers.createNamespace(asNamespaceCatalog, request)); + } + throw new NamespaceNotSupported(catalog.toString()); + } + + private GetNamespaceResponse loadNamespace(Map vars) { + if (asNamespaceCatalog != null) { + Namespace namespace = namespaceFromPathVars(vars); + return castResponse( + GetNamespaceResponse.class, CatalogHandlers.loadNamespace(asNamespaceCatalog, namespace)); + } + throw new NamespaceNotSupported(catalog.toString()); + } + + private RESTResponse dropNamespace(Map vars) { + if (asNamespaceCatalog != null) { + CatalogHandlers.dropNamespace(asNamespaceCatalog, namespaceFromPathVars(vars)); + } + throw new NamespaceNotSupported(catalog.toString()); + } + + private UpdateNamespacePropertiesResponse updateNamespace(Map vars, Object body) { + if (asNamespaceCatalog != null) { + Namespace namespace = namespaceFromPathVars(vars); + UpdateNamespacePropertiesRequest request = + castRequest(UpdateNamespacePropertiesRequest.class, body); + return castResponse( + UpdateNamespacePropertiesResponse.class, + CatalogHandlers.updateNamespaceProperties(asNamespaceCatalog, namespace, request)); + } + throw new NamespaceNotSupported(catalog.toString()); + } + + private ListTablesResponse listTables(Map vars) { + Namespace namespace = namespaceFromPathVars(vars); + return 
castResponse(ListTablesResponse.class, CatalogHandlers.listTables(catalog, namespace)); + } + + private LoadTableResponse createTable(Map vars, Object body) { + final Class responseType = LoadTableResponse.class; + Namespace namespace = namespaceFromPathVars(vars); + CreateTableRequest request = castRequest(CreateTableRequest.class, body); + request.validate(); + if (request.stageCreate()) { + return castResponse( + responseType, CatalogHandlers.stageTableCreate(catalog, namespace, request)); + } else { + return castResponse( + responseType, CatalogHandlers.createTable(catalog, namespace, request)); + } + } + + private RESTResponse dropTable(Map vars) { + if (PropertyUtil.propertyAsBoolean(vars, "purgeRequested", false)) { + CatalogHandlers.purgeTable(catalog, identFromPathVars(vars)); + } else { + CatalogHandlers.dropTable(catalog, identFromPathVars(vars)); + } + return null; + } + + private LoadTableResponse loadTable(Map vars) { + TableIdentifier ident = identFromPathVars(vars); + return castResponse(LoadTableResponse.class, CatalogHandlers.loadTable(catalog, ident)); + } + + private LoadTableResponse registerTable(Map vars, Object body) { + Namespace namespace = namespaceFromPathVars(vars); + RegisterTableRequest request = castRequest(RegisterTableRequest.class, body); + return castResponse(LoadTableResponse.class, CatalogHandlers.registerTable(catalog, namespace, request)); + } + + private LoadTableResponse updateTable(Map vars, Object body) { + TableIdentifier ident = identFromPathVars(vars); + UpdateTableRequest request = castRequest(UpdateTableRequest.class, body); + return castResponse(LoadTableResponse.class, CatalogHandlers.updateTable(catalog, ident, request)); + } + + private RESTResponse renameTable(Object body) { + RenameTableRequest request = castRequest(RenameTableRequest.class, body); + CatalogHandlers.renameTable(catalog, request); + return null; + } + + private RESTResponse reportMetrics(Object body) { + // nothing to do here other than checking that we're getting the correct request + castRequest(ReportMetricsRequest.class, body); + return null; + } + + private RESTResponse commitTransaction(Object body) { + CommitTransactionRequest request = castRequest(CommitTransactionRequest.class, body); + commitTransaction(catalog, request); + return null; + } + + private ListTablesResponse listViews(Map vars) { + if (null != asViewCatalog) { + Namespace namespace = namespaceFromPathVars(vars); + String pageToken = PropertyUtil.propertyAsString(vars, "pageToken", null); + String pageSize = PropertyUtil.propertyAsString(vars, "pageSize", null); + if (pageSize != null) { + return castResponse( + ListTablesResponse.class, + CatalogHandlers.listViews(asViewCatalog, namespace, pageToken, pageSize)); + } else { + return castResponse( + ListTablesResponse.class, CatalogHandlers.listViews(asViewCatalog, namespace)); + } + } + throw new ViewNotSupported(catalog.toString()); + } + + private LoadViewResponse createView(Map vars, Object body) { + if (null != asViewCatalog) { + Namespace namespace = namespaceFromPathVars(vars); + CreateViewRequest request = castRequest(CreateViewRequest.class, body); + return castResponse( + LoadViewResponse.class, CatalogHandlers.createView(asViewCatalog, namespace, request)); + } + throw new ViewNotSupported(catalog.toString()); + } + + private LoadViewResponse loadView(Map vars) { + if (null != asViewCatalog) { + TableIdentifier ident = identFromPathVars(vars); + return castResponse(LoadViewResponse.class, CatalogHandlers.loadView(asViewCatalog, ident)); + 
} + throw new ViewNotSupported(catalog.toString()); + } + + private LoadViewResponse updateView(Map vars, Object body) { + if (null != asViewCatalog) { + TableIdentifier ident = identFromPathVars(vars); + UpdateTableRequest request = castRequest(UpdateTableRequest.class, body); + return castResponse( + LoadViewResponse.class, CatalogHandlers.updateView(asViewCatalog, ident, request)); + } + throw new ViewNotSupported(catalog.toString()); + } + + private RESTResponse renameView(Object body) { + if (null != asViewCatalog) { + RenameTableRequest request = castRequest(RenameTableRequest.class, body); + CatalogHandlers.renameView(asViewCatalog, request); + return null; + } + throw new ViewNotSupported(catalog.toString()); + } + + private RESTResponse dropView(Map vars) { + if (null != asViewCatalog) { + CatalogHandlers.dropView(asViewCatalog, identFromPathVars(vars)); + return null; + } + throw new ViewNotSupported(catalog.toString()); + } + + /** + * This is a very simplistic approach that only validates the requirements for each table and does + * not do any other conflict detection. Therefore, it does not guarantee true transactional + * atomicity, which is left to the implementation details of a REST server. + */ + private static void commitTransaction(Catalog catalog, CommitTransactionRequest request) { + List transactions = Lists.newArrayList(); + + for (UpdateTableRequest tableChange : request.tableChanges()) { + Table table = catalog.loadTable(tableChange.identifier()); + if (table instanceof BaseTable) { + Transaction transaction = + Transactions.newTransaction( + tableChange.identifier().toString(), ((BaseTable) table).operations()); + transactions.add(transaction); + + BaseTransaction.TransactionTable txTable = + (BaseTransaction.TransactionTable) transaction.table(); + + // this performs validations and makes temporary commits that are in-memory + CatalogHandlers.commit(txTable.operations(), tableChange); + } else { + throw new IllegalStateException("Cannot wrap catalog that does not produce BaseTable"); + } + } + // only commit if validations passed previously + transactions.forEach(Transaction::commitTransaction); + } + + @SuppressWarnings({"MethodLength", "unchecked"}) + private T handleRequest( + Route route, Map vars, Object body) { + // update HMS catalog route counter metric + final String metricName = hmsCatalogMetricCount(route.name()); + Counter counter = Metrics.getOrCreateCounter(metricName); + if (counter != null) { + counter.inc(); + } + switch (route) { + case TOKENS: + return (T) tokens(body); + + case CONFIG: + return (T) config(); + + case LIST_NAMESPACES: + return (T) listNamespaces(vars); + + case CREATE_NAMESPACE: + return (T) createNamespace(body); + + case LOAD_NAMESPACE: + return (T) loadNamespace(vars); + + case DROP_NAMESPACE: + return (T) dropNamespace(vars); + + case UPDATE_NAMESPACE: + return (T) updateNamespace(vars, body); + + case LIST_TABLES: + return (T) listTables(vars); + + case CREATE_TABLE: + return (T) createTable(vars, body); + + case DROP_TABLE: + return (T) dropTable(vars); + + case LOAD_TABLE: + return (T) loadTable(vars); + + case REGISTER_TABLE: + return (T) registerTable(vars, body); + + case UPDATE_TABLE: + return (T) updateTable(vars, body); + + case RENAME_TABLE: + return (T) renameTable(body); + + case REPORT_METRICS: + return (T) reportMetrics(body); + + case COMMIT_TRANSACTION: + return (T) commitTransaction(body); + + case LIST_VIEWS: + return (T) listViews(vars); + + case CREATE_VIEW: + return (T) createView(vars, body); + + case 
LOAD_VIEW: + return (T) loadView(vars); + + case UPDATE_VIEW: + return (T) updateView(vars, body); + + case RENAME_VIEW: + return (T) renameView(vars); + + case DROP_VIEW: + return (T) dropView(vars); + + default: + } + return null; + } + + + T execute( + HTTPMethod method, + String path, + Map queryParams, + Object body, + Class responseType, + Map headers, + Consumer errorHandler) { + ErrorResponse.Builder errorBuilder = ErrorResponse.builder(); + Pair> routeAndVars = Route.from(method, path); + if (routeAndVars != null) { + try { + ImmutableMap.Builder vars = ImmutableMap.builder(); + if (queryParams != null) { + vars.putAll(queryParams); + } + vars.putAll(routeAndVars.second()); + return handleRequest(routeAndVars.first(), vars.build(), body); + } catch (RuntimeException e) { + configureResponseFromException(e, errorBuilder); + } + } else { + errorBuilder + .responseCode(400) + .withType("BadRequestException") + .withMessage(String.format("No route for request: %s %s", method, path)); + } + ErrorResponse error = errorBuilder.build(); + errorHandler.accept(error); + // if the error handler doesn't throw an exception, throw a generic one + throw new RESTException("Unhandled error: %s", error); + } + + @Override + public T delete( + String path, + Class responseType, + Map headers, + Consumer errorHandler) { + return execute(HTTPMethod.DELETE, path, null, null, responseType, headers, errorHandler); + } + + @Override + public T delete( + String path, + Map queryParams, + Class responseType, + Map headers, + Consumer errorHandler) { + return execute(HTTPMethod.DELETE, path, queryParams, null, responseType, headers, errorHandler); + } + + @Override + public T post( + String path, + RESTRequest body, + Class responseType, + Map headers, + Consumer errorHandler) { + return execute(HTTPMethod.POST, path, null, body, responseType, headers, errorHandler); + } + + @Override + public T get( + String path, + Map queryParams, + Class responseType, + Map headers, + Consumer errorHandler) { + return execute(HTTPMethod.GET, path, queryParams, null, responseType, headers, errorHandler); + } + + @Override + public void head(String path, Map headers, Consumer errorHandler) { + execute(HTTPMethod.HEAD, path, null, null, null, headers, errorHandler); + } + + @Override + public T postForm( + String path, + Map formData, + Class responseType, + Map headers, + Consumer errorHandler) { + return execute(HTTPMethod.POST, path, null, formData, responseType, headers, errorHandler); + } + + @Override + public void close() { + // The caller is responsible for closing the underlying catalog backing this REST catalog. 
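Because HMSCatalogAdapter implements RESTClient, it can be exercised in-process, with no HTTP server in between. A hedged sketch; the error handler is illustrative:

```java
import java.util.function.Consumer;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.rest.HMSCatalogAdapter;
import org.apache.iceberg.rest.responses.ConfigResponse;
import org.apache.iceberg.rest.responses.ErrorResponse;

public class AdapterCallSketch {
  public static ConfigResponse fetchConfig(Catalog catalog) {
    HMSCatalogAdapter adapter = new HMSCatalogAdapter(catalog);
    Consumer<ErrorResponse> onError = err -> {
      throw new RuntimeException("REST call failed: " + err.message());
    };
    // Resolves to Route.CONFIG and increments the "hmscatalog.config.count" counter.
    return adapter.get("v1/config", ImmutableMap.of(), ConfigResponse.class, ImmutableMap.of(), onError);
  }
}
```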
+ } + + private static class NamespaceNotSupported extends RuntimeException { + NamespaceNotSupported(String catalog) { + super("catalog " + catalog + " does not support namespace"); + } + } + + private static class ViewNotSupported extends RuntimeException { + ViewNotSupported(String catalog) { + super("catalog " + catalog + " does not support views"); + } + } + + private static class BadResponseType extends RuntimeException { + private BadResponseType(Class responseType, Object response) { + super( + String.format("Invalid response object, not a %s: %s", responseType.getName(), response)); + } + } + + private static class BadRequestType extends RuntimeException { + private BadRequestType(Class requestType, Object request) { + super(String.format("Invalid request object, not a %s: %s", requestType.getName(), request)); + } + } + + public static T castRequest(Class requestType, Object request) { + if (requestType.isInstance(request)) { + return requestType.cast(request); + } + throw new BadRequestType(requestType, request); + } + + public static T castResponse(Class responseType, Object response) { + if (responseType.isInstance(response)) { + return responseType.cast(response); + } + throw new BadResponseType(responseType, response); + } + + public static void configureResponseFromException( + Exception exc, ErrorResponse.Builder errorBuilder) { + errorBuilder + .responseCode(EXCEPTION_ERROR_CODES.getOrDefault(exc.getClass(), 500)) + .withType(exc.getClass().getSimpleName()) + .withMessage(exc.getMessage()) + .withStackTrace(exc); + } + + private static Namespace namespaceFromPathVars(Map pathVars) { + return RESTUtil.decodeNamespace(pathVars.get("namespace")); + } + + private static TableIdentifier identFromPathVars(Map pathVars) { + return TableIdentifier.of( + namespaceFromPathVars(pathVars), RESTUtil.decodeString(pathVars.get("table"))); + } +} diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java new file mode 100644 index 000000000000..1bddb3e6842d --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java @@ -0,0 +1,162 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
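To make the EXCEPTION_ERROR_CODES mapping concrete, a small sketch of how a catalog exception becomes a REST error payload — the 404 follows from the table above:

```java
import org.apache.iceberg.exceptions.NoSuchTableException;
import org.apache.iceberg.rest.HMSCatalogAdapter;
import org.apache.iceberg.rest.responses.ErrorResponse;

public class ErrorMappingSketch {
  public static ErrorResponse toError() {
    ErrorResponse.Builder builder = ErrorResponse.builder();
    HMSCatalogAdapter.configureResponseFromException(
        new NoSuchTableException("Table does not exist: hivedb.tbl"), builder);
    // The builder now carries responseCode 404, type "NoSuchTableException" and the stack trace.
    return builder.build();
  }
}
```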
+ */ +package org.apache.iceberg.rest; + +import java.io.IOException; +import java.lang.ref.Reference; +import java.lang.ref.SoftReference; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicReference; +import javax.servlet.http.HttpServlet; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.ServletSecurity; +import org.apache.hadoop.hive.metastore.ServletServerBuilder; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.hive.HiveCatalog; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Catalog & servlet factory. + */ +public class HMSCatalogFactory { + private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogFactory.class); + /** + * Convenience soft reference to last catalog. + */ + protected static final AtomicReference> catalogRef = new AtomicReference<>(); + + public static Catalog getLastCatalog() { + Reference soft = catalogRef.get(); + return soft != null ? soft.get() : null; + } + + protected static void setLastCatalog(Catalog catalog) { + catalogRef.set(new SoftReference<>(catalog)); + } + + protected final Configuration configuration; + protected final int port; + protected final String path; + protected Catalog catalog; + + /** + * Factory constructor. + *
<p>Called by the static method {@link HMSCatalogFactory#createServlet(Configuration)} that is
+ * declared in configuration and found through introspection.</p>
+ * @param conf the configuration + * @param catalog the catalog + */ + protected HMSCatalogFactory(Configuration conf, Catalog catalog) { + port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT); + path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH); + this.configuration = conf; + this.catalog = catalog; + } + + public int getPort() { + return port; + } + + public String getPath() { + return path; + } + + public Catalog getCatalog() { + return catalog; + } + + /** + * Creates the catalog instance. + * @return the catalog + */ + protected Catalog createCatalog() { + final Map properties = new TreeMap<>(); + MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, ""); + final String configUri = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS); + if (configUri != null) { + properties.put("uri", configUri); + } + final String configWarehouse = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.WAREHOUSE); + if (configWarehouse != null) { + properties.put("warehouse", configWarehouse); + } + final String configExtWarehouse = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL); + if (configExtWarehouse != null) { + properties.put("external-warehouse", configExtWarehouse); + } + final HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog(); + hiveCatalog.setConf(configuration); + final String catalogName = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.CATALOG_DEFAULT); + hiveCatalog.initialize(catalogName, properties); + long expiry = MetastoreConf.getLongVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY); + return expiry > 0 ? new HMSCachingCatalog<>(hiveCatalog, expiry) : hiveCatalog; + } + + /** + * Creates the REST catalog servlet instance. + * @param catalog the Iceberg catalog + * @return the servlet + */ + protected HttpServlet createServlet(Catalog catalog) { + ServletSecurity security = new ServletSecurity(configuration); + return security.proxy(new HMSCatalogServlet(new HMSCatalogAdapter(catalog))); + } + + /** + * Creates the REST catalog servlet instance. + * @return the servlet + * @throws IOException if creation fails + */ + protected HttpServlet createServlet() throws IOException { + if (port >= 0 && path != null && !path.isEmpty()) { + Catalog actualCatalog = catalog; + if (actualCatalog == null) { + actualCatalog = catalog = createCatalog(); + } + setLastCatalog(actualCatalog); + return createServlet(actualCatalog); + } + return null; + } + + /** + * Factory method to describe Iceberg servlet. + *
<p>This method name is found through configuration as {@link MetastoreConf.ConfVars#ICEBERG_CATALOG_SERVLET_FACTORY}
+ * and looked up through reflection to start from HMS.</p>
+ * + * @param configuration the configuration + * @return the servlet descriptor instance + */ + @SuppressWarnings("unused") + public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) { + try { + HMSCatalogFactory hms = new HMSCatalogFactory(configuration, null); + HttpServlet servlet = hms.createServlet(); + if (servlet != null) { + return new ServletServerBuilder.Descriptor(hms.getPort(), hms.getPath(), servlet); + } + } catch (IOException exception) { + LOG.error("failed to create servlet ", exception); + } + return null; + } +} diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java new file mode 100644 index 000000000000..b164709149b1 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.rest; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.Map; +import java.util.Optional; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.relocated.com.google.common.io.CharStreams; +import org.apache.iceberg.rest.HMSCatalogAdapter.HTTPMethod; +import org.apache.iceberg.rest.HMSCatalogAdapter.Route; +import org.apache.iceberg.rest.responses.ErrorResponse; +import org.apache.iceberg.util.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Original @ https://github.com/apache/iceberg/blob/1.6.x/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java + * The RESTCatalogServlet provides a servlet implementation used in combination with a + * RESTCatalogAdaptor to proxy the REST Spec to any Catalog implementation. 
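For context, the reflective hand-off that connects ICEBERG_CATALOG_SERVLET_FACTORY to the servlet below; a hedged sketch of the lookup, not the exact HiveMetaStore call site:

```java
import java.lang.reflect.Method;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.ServletServerBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class ServletFactoryLookupSketch {
  public static ServletServerBuilder.Descriptor lookup(Configuration conf) throws Exception {
    String factory = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_FACTORY);
    // The configured factory exposes public static createServlet(Configuration),
    // which here returns a ServletServerBuilder.Descriptor.
    Method create = Class.forName(factory).getMethod("createServlet", Configuration.class);
    return (ServletServerBuilder.Descriptor) create.invoke(null, conf);
  }
}
```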
+ */ +public class HMSCatalogServlet extends HttpServlet { + private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogServlet.class); + private static final String CONTENT_TYPE = "Content-Type"; + private static final String APPLICATION_JSON = "application/json"; + + private final HMSCatalogAdapter restCatalogAdapter; + private final Map responseHeaders = + ImmutableMap.of(CONTENT_TYPE, APPLICATION_JSON); + + public HMSCatalogServlet(HMSCatalogAdapter restCatalogAdapter) { + this.restCatalogAdapter = restCatalogAdapter; + } + + @Override + public String getServletName() { + return "Iceberg REST Catalog"; + } + + @Override + protected void service(HttpServletRequest request, HttpServletResponse response) { + try { + ServletRequestContext context = ServletRequestContext.from(request); + response.setStatus(HttpServletResponse.SC_OK); + responseHeaders.forEach(response::setHeader); + final Optional error = context.error(); + if (error.isPresent()) { + response.setStatus(HttpServletResponse.SC_BAD_REQUEST); + RESTObjectMapper.mapper().writeValue(response.getWriter(), error.get()); + return; + } + Object responseBody = + restCatalogAdapter.execute( + context.method(), + context.path(), + context.queryParams(), + context.body(), + context.route().responseClass(), + context.headers(), + handle(response)); + + if (responseBody != null) { + RESTObjectMapper.mapper().writeValue(response.getWriter(), responseBody); + } + } catch (RuntimeException | IOException e) { + // should be a RESTException but not able to see them through dependencies + LOG.error("Error processing REST request", e); + response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + } + } + + protected Consumer handle(HttpServletResponse response) { + return errorResponse -> { + response.setStatus(errorResponse.code()); + try { + RESTObjectMapper.mapper().writeValue(response.getWriter(), errorResponse); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; + } + + public static class ServletRequestContext { + private HTTPMethod method; + private Route route; + private String path; + private Map headers; + private Map queryParams; + private Object body; + + private ErrorResponse errorResponse; + + private ServletRequestContext(ErrorResponse errorResponse) { + this.errorResponse = errorResponse; + } + + private ServletRequestContext( + HTTPMethod method, + Route route, + String path, + Map headers, + Map queryParams, + Object body) { + this.method = method; + this.route = route; + this.path = path; + this.headers = headers; + this.queryParams = queryParams; + this.body = body; + } + + static ServletRequestContext from(HttpServletRequest request) throws IOException { + HTTPMethod method = HTTPMethod.valueOf(request.getMethod()); + // path = uri - context-path + servlet-path + / + String path = request.getPathInfo(); + if (path == null) { + path = request.getRequestURI().substring( + request.getContextPath().length() + request.getServletPath().length()); + } + // remove leading / + path = path.substring(1); + Pair> routeContext = Route.from(method, path); + + if (routeContext == null) { + return new ServletRequestContext( + ErrorResponse.builder() + .responseCode(400) + .withType("BadRequestException") + .withMessage(String.format("No route for request: %s %s", method, path)) + .build()); + } + + Route route = routeContext.first(); + Object requestBody = null; + if (route.requestClass() != null) { + requestBody = + RESTObjectMapper.mapper().readValue(request.getReader(), route.requestClass()); + } 
else if (route == Route.TOKENS) { + try (Reader reader = new InputStreamReader(request.getInputStream())) { + requestBody = RESTUtil.decodeFormData(CharStreams.toString(reader)); + } + } + + Map queryParams = + request.getParameterMap().entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue()[0])); + Map headers = + Collections.list(request.getHeaderNames()).stream() + .collect(Collectors.toMap(Function.identity(), request::getHeader)); + + return new ServletRequestContext(method, route, path, headers, queryParams, requestBody); + } + + HTTPMethod method() { + return method; + } + + Route route() { + return route; + } + + public String path() { + return path; + } + + public Map headers() { + return headers; + } + + public Map queryParams() { + return queryParams; + } + + public Object body() { + return body; + } + + public Optional error() { + return Optional.ofNullable(errorResponse); + } + } +} diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java new file mode 100644 index 000000000000..8ae4faf51400 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.hive; + +/** + * Test helper utility. + */ +public class IcebergTestHelper { + /** + * Invalidates all clients remaining in the cached client pool held by the + * Catalog instance(s). + *
<p>This is necessary when a new catalog is instantiated to avoid reusing
+ * old clients that may point to a (now) defunct catalog.</p>
+ */ + public static void invalidatePoolCache() { + CachedClientPool.clientPoolCache().invalidateAll(); + } + +} diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java new file mode 100644 index 000000000000..d6b48a84dec4 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java @@ -0,0 +1,451 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iceberg.rest; + +import com.codahale.metrics.Counter; +import com.codahale.metrics.MetricRegistry; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import com.nimbusds.jose.JWSAlgorithm; +import com.nimbusds.jose.JWSHeader; +import com.nimbusds.jose.JWSSigner; +import com.nimbusds.jose.crypto.RSASSASigner; +import com.nimbusds.jose.jwk.RSAKey; +import com.nimbusds.jwt.JWTClaimsSet; +import com.nimbusds.jwt.SignedJWT; +import java.io.BufferedReader; +import java.io.DataOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.HttpURLConnection; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.TimeUnit; +import javax.servlet.http.HttpServletResponse; +import org.apache.commons.jexl3.JexlBuilder; +import org.apache.commons.jexl3.JexlContext; +import org.apache.commons.jexl3.JexlEngine; +import org.apache.commons.jexl3.JexlException; +import org.apache.commons.jexl3.JexlFeatures; +import org.apache.commons.jexl3.MapContext; +import org.apache.commons.jexl3.introspection.JexlPermissions; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaException; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo; +import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.metrics.Metrics; 
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager; +import org.apache.hadoop.hive.metastore.properties.PropertyManager; +import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; +import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hive.iceberg.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.hive.iceberg.com.fasterxml.jackson.core.type.TypeReference; +import org.apache.hive.iceberg.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.catalog.SupportsNamespaces; +import org.apache.iceberg.hive.IcebergTestHelper; +import org.eclipse.jetty.server.Server; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class HMSTestBase { + protected static final Logger LOG = LoggerFactory.getLogger(HMSTestBase.class.getName()); + protected static final String BASE_DIR = System.getProperty("basedir"); + protected static Random RND = new Random(20230922); + protected static final String USER_1 = "USER_1"; + protected static final String DB_NAME = "hivedb"; + /** A Jexl engine for convenience. */ + static final JexlEngine JEXL; + static { + JexlFeatures features = new JexlFeatures() + .sideEffect(false) + .sideEffectGlobal(false); + JexlPermissions p = JexlPermissions.RESTRICTED + .compose("org.apache.hadoop.hive.metastore.*", "org.apache.iceberg.*"); + JEXL = new JexlBuilder() + .features(features) + .permissions(p) + .create(); + } + + protected static final long EVICTION_INTERVAL = TimeUnit.SECONDS.toMillis(10); + private static final File JWT_AUTHKEY_FILE = + new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json"); + protected static final File JWT_NOAUTHKEY_FILE = + new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-unauthorized-key.json"); + protected static final File JWT_JWKS_FILE = + new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-verification-jwks.json"); + protected static final int MOCK_JWKS_SERVER_PORT = 8089; + @ClassRule + public static final WireMockRule MOCK_JWKS_SERVER = new WireMockRule(MOCK_JWKS_SERVER_PORT); + + + public static class TestSchemaInfo extends MetaStoreSchemaInfo { + public TestSchemaInfo(String metastoreHome, String dbType) throws HiveMetaException { + super(metastoreHome, dbType); + } + @Override + public String getMetaStoreScriptDir() { + return new File(BASE_DIR, "../metastore-server/src/main/sql/derby").getAbsolutePath(); + } + } + + @Rule + public TemporaryFolder temp = new TemporaryFolder(); + + protected Configuration conf = null; + protected String NS = "hms" + RND.nextInt(100); + + protected int port = -1; + protected int catalogPort = -1; + protected final String catalogPath = "hmscatalog"; + protected static final int WAIT_FOR_SERVER = 5000; + // for direct calls + protected Catalog catalog; + protected SupportsNamespaces nsCatalog; + + protected int createMetastoreServer(Configuration conf) throws Exception { + return MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf); + } + + protected void stopMetastoreServer(int port) { + MetaStoreTestUtils.close(port); + } + + @Before + public void setUp() throws Exception { + NS = "hms" + RND.nextInt(100); + conf = MetastoreConf.newMetastoreConf(); + MetaStoreTestUtils.setConfForStandloneMode(conf); + 
MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.CAPABILITY_CHECK, false);
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.SCHEMA_VERIFICATION, false);
+
+ conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), true);
+ // "hive.metastore.warehouse.dir"
+ String whpath = new File(BASE_DIR, "target/tmp/warehouse/managed").toURI().toString();
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE, whpath);
+ HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_WAREHOUSE, whpath);
+ // "hive.metastore.warehouse.external.dir"
+ String extwhpath = new File(BASE_DIR, "target/tmp/warehouse/external").toURI().toString();
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL, extwhpath);
+ conf.set(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname, extwhpath);
+
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.SCHEMA_INFO_CLASS, "org.apache.iceberg.rest.HMSTestBase$TestSchemaInfo");
+ // Events that get cleaned happen in batches of 1 to exercise batching code
+ MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.EVENT_CLEAN_MAX_EVENTS, 1L);
+ MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT, 0);
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_AUTH, "jwt");
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH, catalogPath);
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION_JWT_JWKS_URL,
+ "http://localhost:" + MOCK_JWKS_SERVER_PORT + "/jwks");
+ MOCK_JWKS_SERVER.stubFor(get("/jwks")
+ .willReturn(ok()
+ .withBody(Files.readAllBytes(JWT_JWKS_FILE.toPath()))));
+ Metrics.initialize(conf);
+ // The server
+ port = createMetastoreServer(conf);
+ System.out.println("Starting MetaStore Server on port " + port);
+ // The property manager declaration
+ PropertyManager.declare(NS, HMSPropertyManager.class);
+ // The client
+ HiveMetaStoreClient client = createClient(conf);
+ Assert.assertNotNull("Unable to connect to the MetaStore server", client);
+
+ // create a managed root
+ String location = temp.newFolder("hivedb2023").getAbsolutePath();
+ Database db = new Database(DB_NAME, "catalog test", location, Collections.emptyMap());
+ client.createDatabase(db);
+
+ Catalog ice = acquireServer();
+ catalog = ice;
+ nsCatalog = catalog instanceof SupportsNamespaces ? (SupportsNamespaces) catalog : null;
+ catalogPort = HiveMetaStore.getCatalogServletPort();
+ }
+
+ private static String format(String format, Object...
params) {
+ return org.slf4j.helpers.MessageFormatter.arrayFormat(format, params).getMessage();
+ }
+
+ private static Catalog acquireServer() throws InterruptedException {
+ final int wait = 200;
+ Server iceServer = HiveMetaStore.getServletServer();
+ int tries = WAIT_FOR_SERVER / wait;
+ while (iceServer == null && tries-- > 0) {
+ Thread.sleep(wait);
+ iceServer = HiveMetaStore.getServletServer();
+ }
+ if (iceServer != null) {
+ boolean starting;
+ tries = WAIT_FOR_SERVER / wait;
+ while ((starting = iceServer.isStarting()) && tries-- > 0) {
+ Thread.sleep(wait);
+ }
+ if (starting) {
+ LOG.warn("server still starting after {}ms", WAIT_FOR_SERVER);
+ }
+ Catalog ice = HMSCatalogFactory.getLastCatalog();
+ if (ice == null) {
+ throw new NullPointerException(format("unable to acquire catalog after {}ms", WAIT_FOR_SERVER));
+ }
+ return ice;
+ } else {
+ throw new NullPointerException(format("unable to acquire server after {}ms", WAIT_FOR_SERVER));
+ }
+ }
+
+ protected HiveMetaStoreClient createClient(Configuration conf) throws Exception {
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, "");
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.EXECUTE_SET_UGI, false);
+ return new HiveMetaStoreClient(conf);
+ }
+
+ /**
+ * @param apis a list of api calls
+ * @return the map of HMSCatalog route counter metrics keyed by their names
+ */
+ static Map<String, Long> reportMetricCounters(String... apis) {
+ Map<String, Long> map = new LinkedHashMap<>();
+ MetricRegistry registry = Metrics.getRegistry();
+ List<String> names = HMSCatalogAdapter.getMetricNames(apis);
+ for (String name : names) {
+ Counter counter = registry.counter(name);
+ if (counter != null) {
+ long count = counter.getCount();
+ map.put(name, count);
+ }
+ }
+ return map;
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ try {
+ if (port >= 0) {
+ System.out.println("Stopping MetaStore Server on port " + port);
+ stopMetastoreServer(port);
+ port = -1;
+ }
+ // Clear the SSL system properties before each test.
+ System.clearProperty(ObjectStore.TRUSTSTORE_PATH_KEY);
+ System.clearProperty(ObjectStore.TRUSTSTORE_PASSWORD_KEY);
+ System.clearProperty(ObjectStore.TRUSTSTORE_TYPE_KEY);
+ // Invalidate the cached Iceberg client pools.
+ IcebergTestHelper.invalidatePoolCache();
+ } finally {
+ catalog = null;
+ nsCatalog = null;
+ catalogPort = -1;
+ conf = null;
+ }
+ }
+
+ protected String generateJWT() throws Exception {
+ return generateJWT(JWT_AUTHKEY_FILE.toPath());
+ }
+
+ protected String generateJWT(Path path) throws Exception {
+ return generateJWT(USER_1, path, TimeUnit.MINUTES.toMillis(5));
+ }
+
+ private static String generateJWT(String user, Path keyFile, long lifeTimeMillis) throws Exception {
+ RSAKey rsaKeyPair = RSAKey.parse(new String(java.nio.file.Files.readAllBytes(keyFile), StandardCharsets.UTF_8));
+ // Create RSA-signer with the private key
+ JWSSigner signer = new RSASSASigner(rsaKeyPair);
+ JWSHeader header = new JWSHeader
+ .Builder(JWSAlgorithm.RS256)
+ .keyID(rsaKeyPair.getKeyID())
+ .build();
+ Date now = new Date();
+ Date expirationTime = new Date(now.getTime() + lifeTimeMillis);
+ JWTClaimsSet claimsSet = new JWTClaimsSet.Builder()
+ .jwtID(UUID.randomUUID().toString())
+ .issueTime(now)
+ .issuer("auth-server")
+ .subject(user)
+ .expirationTime(expirationTime)
+ .claim("custom-claim-or-payload", "custom-claim-or-payload")
+ .build();
+ SignedJWT signedJWT = new SignedJWT(header, claimsSet);
+ // Compute the RSA signature
+ signedJWT.sign(signer);
+ return signedJWT.serialize();
+ }
+
+ /**
+ * Performs a JSON client call.
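+ * <p>Equivalent to {@code clientCall(jwt, url, method, true, arg)}, i.e. the argument is sent as application/json.</p>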
+ * @param jwt the JWT token
+ * @param url the url
+ * @param method the http method
+ * @param arg the argument that will be transported as JSON
+ * @return the result that was returned as JSON
+ * @throws IOException if marshalling the request/response fails
+ */
+ public static Object clientCall(String jwt, URL url, String method, Object arg) throws IOException {
+ return clientCall(jwt, url, method, true, arg);
+ }
+
+ public static class ServerResponse {
+ private final int code;
+ private final String content;
+ public ServerResponse(int code, String content) {
+ this.code = code;
+ this.content = content;
+ }
+ }
+
+ /**
+ * Performs an http client call.
+ * @param jwt a JWT bearer token (can be null)
+ * @param url the url to call
+ * @param method the http method to use
+ * @param json whether the call is application/json (true) or application/x-www-form-urlencoded (false)
+ * @param arg the query argument
+ * @return the (JSON) response
+ * @throws IOException if the request fails
+ */
+ public static Object clientCall(String jwt, URL url, String method, boolean json, Object arg) throws IOException {
+ HttpURLConnection con = (HttpURLConnection) url.openConnection();
+ try {
+ if ("PATCH".equals(method)) {
+ con.setRequestMethod("POST");
+ con.setRequestProperty("X-HTTP-Method-Override", "PATCH");
+ } else {
+ con.setRequestMethod(method);
+ }
+ con.setRequestProperty(MetaStoreUtils.USER_NAME_HTTP_HEADER, url.getUserInfo());
+ if (json) {
+ con.setRequestProperty("Content-Type", "application/json");
+ } else {
+ con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+ }
+ con.setRequestProperty("Accept", "application/json");
+ if (jwt != null) {
+ con.setRequestProperty("Authorization", "Bearer " + jwt);
+ }
+ con.setDoInput(true);
+ if (arg != null) {
+ con.setDoOutput(true);
+ try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
+ if (json) {
+ wr.writeBytes(serialize(arg));
+ } else {
+ wr.writeBytes(arg.toString());
+ }
+ wr.flush();
+ }
+ }
+ // perform http method
+ return httpResponse(con);
+ } finally {
+ con.disconnect();
+ }
+ }
+
+ private static Object httpResponse(HttpURLConnection con) throws IOException {
+ int responseCode = con.getResponseCode();
+ InputStream responseStream = con.getErrorStream();
+ if (responseStream == null) {
+ try {
+ responseStream = con.getInputStream();
+ } catch (IOException e) {
+ return new ServerResponse(responseCode, e.getMessage());
+ }
+ }
+ if (responseStream != null) {
+ try (BufferedReader reader = new BufferedReader(
+ new InputStreamReader(responseStream, StandardCharsets.UTF_8))) {
+ // if not strictly ok, check we are still receiving JSON
+ if (responseCode != HttpServletResponse.SC_OK) {
+ String contentType = con.getContentType();
+ if (contentType == null || !contentType.contains("application/json")) {
+ String line;
+ StringBuilder response = new StringBuilder("error " + responseCode + ":");
+ while ((line = reader.readLine()) != null) response.append(line);
+ return new ServerResponse(responseCode, response.toString());
+ }
+ }
+ // there might be no answer, which is still ok
+ Object r = reader.ready() ?
deserialize(reader) : new HashMap<>(1);
+ if (r instanceof Map) {
+ ((Map) r).put("status", responseCode);
+ }
+ return r;
+ }
+ }
+ return responseCode;
+ }
+
+ private static final ObjectMapper MAPPER = RESTObjectMapper.mapper();
+
+ static <T> String serialize(T object) {
+ try {
+ return MAPPER.writeValueAsString(object);
+ } catch (JsonProcessingException xany) {
+ throw new RuntimeException(xany);
+ }
+ }
+
+ static <T> T deserialize(Reader s) {
+ try {
+ return MAPPER.readValue(s, new TypeReference<T>() {});
+ } catch (IOException xany) {
+ throw new RuntimeException(xany);
+ }
+ }
+
+ /** Evaluates a JEXL expression against a (JSON) map, e.g. {@code eval(response, "json -> json.status")}. */
+ static Object eval(Object properties, String expr) {
+ try {
+ JexlContext context = properties instanceof Map
+ ? new MapContext((Map) properties)
+ : JexlEngine.EMPTY_CONTEXT;
+ Object result = JEXL.createScript(expr).execute(context, properties);
+ return result;
+ } catch (JexlException xany) {
+ throw xany;
+ }
+ }
+} \ No newline at end of file diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java new file mode 100644 index 000000000000..7b05602fc123 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java @@ -0,0 +1,213 @@ +/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.NoSuchTableException;
+import org.apache.iceberg.rest.requests.CreateTableRequest;
+import org.apache.iceberg.rest.requests.RenameTableRequest;
+import org.apache.iceberg.types.Types;
+import static org.apache.iceberg.types.Types.NestedField.required;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestHMSCatalog extends HMSTestBase {
+ public TestHMSCatalog() {
+ super();
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ @Test
+ public void testCreateNamespaceHttp() throws Exception {
+ String ns = "nstesthttp";
+ // list namespaces
+ URL url = new URL("http://hive@localhost:" + catalogPort + "/" + catalogPath + "/v1/namespaces");
+ String jwt = generateJWT();
+ // check the namespaces list only contains the two pre-existing databases
+ Object response = clientCall(jwt, url, "GET", null);
+ Assert.assertTrue(response instanceof Map);
+ Map nsrep = (Map) response;
+ List<List<String>> nslist = (List<List<String>>) nsrep.get("namespaces");
+ Assert.assertEquals(2, nslist.size());
+ Assert.assertTrue(nslist.contains(Collections.singletonList("default")));
+ Assert.assertTrue(nslist.contains(Collections.singletonList("hivedb")));
+ // create the namespace; should succeed
+ response = clientCall(jwt, url, "POST", false, "{ \"namespace\" : [ \"" + ns + "\" ], "
+ + "\"properties\":{ \"owner\": \"apache\", \"group\" : \"iceberg\" }"
+ + "}");
+ Assert.assertNotNull(response);
+ HiveMetaStoreClient client = createClient(conf);
+ Database database1 = client.getDatabase(ns);
+ Assert.assertEquals("apache", database1.getParameters().get("owner"));
+ Assert.assertEquals("iceberg", database1.getParameters().get("group"));
+
+ List<TableIdentifier> tis = catalog.listTables(Namespace.of(ns));
+ Assert.assertTrue(tis.isEmpty());
+
+ // list tables in the new namespace; should succeed and be empty
+ url = new URL("http://hive@localhost:" + catalogPort + "/" + catalogPath + "/v1/namespaces/" + ns + "/tables");
+ response = clientCall(jwt, url, "GET", null);
+ Assert.assertNotNull(response);
+
+ // quick check on metrics
+ Map<String, Long> counters = reportMetricCounters("list_namespaces", "list_tables");
+ counters.forEach((key, value) -> Assert.assertTrue(key, value > 0));
+ }
+
+ private Schema getTestSchema() {
+ return new Schema(
+ required(1, "id", Types.IntegerType.get(), "unique ID"),
+ required(2, "data", Types.StringType.get()));
+ }
+
+ @Test
+ public void testCreateTableTxnBuilder() throws Exception {
+ URI iceUri = URI.create("http://hive@localhost:" + catalogPort + "/" + catalogPath + "/v1/");
+ String jwt = generateJWT();
+ Schema schema = getTestSchema();
+ final String tblName = "tbl_" + Integer.toHexString(RND.nextInt(65536));
+ final TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, tblName);
+ String location = temp.newFolder(tableIdent.toString()).toString();
+
+ try {
+ Transaction txn = catalog.buildTable(tableIdent, schema)
+ .withLocation(location)
+ .createTransaction();
+
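// the transaction stages the table creation; nothing is visible in the catalog until commit
+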
txn.commitTransaction();
+ Table table = catalog.loadTable(tableIdent);
+
+ Assert.assertEquals(location, table.location());
+ Assert.assertEquals(2, table.schema().columns().size());
+ Assert.assertTrue(table.spec().isUnpartitioned());
+ List<TableIdentifier> tis = catalog.listTables(Namespace.of(DB_NAME));
+ Assert.assertFalse(tis.isEmpty());
+
+ // list namespaces; should succeed
+ URL url = iceUri.resolve("namespaces").toURL();
+ Object response = clientCall(jwt, url, "GET", null);
+ Assert.assertNotNull(response);
+ Assert.assertEquals(200, (int) eval(response, "json -> json.status"));
+ List<List<String>> nslist = (List<List<String>>) eval(response, "json -> json.namespaces");
+ Assert.assertEquals(2, nslist.size());
+ Assert.assertTrue(nslist.contains(Collections.singletonList("default")));
+ Assert.assertTrue(nslist.contains(Collections.singletonList("hivedb")));
+
+ // list tables in hivedb; should succeed
+ url = iceUri.resolve("namespaces/" + DB_NAME + "/tables").toURL();
+ response = clientCall(jwt, url, "GET", null);
+ Assert.assertNotNull(response);
+ Assert.assertEquals(200, (int) eval(response, "json -> json.status"));
+ Assert.assertEquals(1, (int) eval(response, "json -> size(json.identifiers)"));
+ Assert.assertEquals(tblName, eval(response, "json -> json.identifiers[0].name"));
+
+ // load table; should succeed
+ url = iceUri.resolve("namespaces/" + DB_NAME + "/tables/" + tblName).toURL();
+ response = clientCall(jwt, url, "GET", null);
+ Assert.assertNotNull(response);
+ Assert.assertEquals(200, (int) eval(response, "json -> json.status"));
+ Assert.assertEquals(location, eval(response, "json -> json.metadata.location"));
+
+ // quick check on metrics
+ Map<String, Long> counters = reportMetricCounters("list_namespaces", "list_tables", "load_table");
+ counters.forEach((key, value) -> Assert.assertTrue(key, value > 0));
+ table = catalog.loadTable(tableIdent);
+ Assert.assertNotNull(table);
+ } catch (IOException xany) {
+ Assert.fail(xany.getMessage());
+ } finally {
+ catalog.dropTable(tableIdent, false);
+ }
+ }
+
+ @Test
+ public void testTableAPI() throws Exception {
+ URI iceUri = URI.create("http://hive@localhost:" + catalogPort + "/" + catalogPath + "/v1/");
+ String jwt = generateJWT();
+ Schema schema = getTestSchema();
+ final String tblName = "tbl_" + Integer.toHexString(RND.nextInt(65536));
+ final TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, tblName);
+ String location = temp.newFolder(tableIdent.toString()).toString();
+ // create table
+ CreateTableRequest create = CreateTableRequest.builder().
+ withName(tblName).
+ withLocation(location).
+ withSchema(schema).build();
+ URL url = iceUri.resolve("namespaces/" + DB_NAME + "/tables").toURL();
+ Object response = clientCall(jwt, url, "POST", create);
+ Assert.assertNotNull(response);
+ Assert.assertEquals(200, (int) eval(response, "json -> json.status"));
+ Assert.assertEquals(location, eval(response, "json -> json.metadata.location"));
+ Table table = catalog.loadTable(tableIdent);
+ Assert.assertEquals(location, table.location());
+
+ // rename table
+ final String rtblName = "TBL_" + Integer.toHexString(RND.nextInt(65536));
+ final TableIdentifier rtableIdent = TableIdentifier.of(DB_NAME, rtblName);
+ RenameTableRequest rename = RenameTableRequest.builder().
+ withSource(tableIdent).
+ withDestination(rtableIdent).
+ build(); + url = iceUri.resolve("tables/rename").toURL(); + response = clientCall(jwt, url, "POST", rename); + Assert.assertNotNull(response); + Assert.assertEquals(200, (int) eval(response, "json -> json.status")); + table = catalog.loadTable(rtableIdent); + Assert.assertEquals(location, table.location()); + + // delete table + url = iceUri.resolve("namespaces/" + DB_NAME + "/tables/" + rtblName).toURL(); + response = clientCall(jwt, url, "DELETE", null); + Assert.assertNotNull(response); + Assert.assertEquals(200, (int) eval(response, "json -> json.status")); + Assert.assertThrows(NoSuchTableException.class, () -> catalog.loadTable(rtableIdent)); + } + +} diff --git a/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json new file mode 100644 index 000000000000..b5b4fb40e7c9 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json @@ -0,0 +1,12 @@ +{ + "p": "-8lxjB9JZA44XBLLVGnY20x28uT8NQ1BlbqI0Tlr96An4B_PzgPL5_bFFB7SWs8ehSWn9z2SJfClhQpBLfy-2mXvJek_xgibESIlPXqY9Qrg7-PhRmPs3whyiIsnn8tpPMm2XJ_4n0Y-Yfx4nwErGdy84LiKFMDXPEk2a7ndYWs", + "kty": "RSA", + "q": "0YAcTLBnTrSUiciE0lliIkAidW0TnHP48v-vJitLEz0d8mlTZ_aeOQJm6CUOqF7BqQv3Z8OK_HYKXfOr7xzUlfROONybUXRFE0LvT5Fjvrq-56QGB6GeFq5i6HKlRcC_8TD6WwUJWIzeYuPqhp_FYIpT4ds131d5VYPKDCdY_dM", + "d": "VsxW72idEAtoZQDphvxJ0t54EyRfcIJVB9BZuqnyNTfH-VsaUO3st86w_PMU_i0lmyIc8dkCmwOb8R2pRXDo6UxEYUe5YfBnvn9iYF3Ll2QfPOKfZhDBOfqSjEb1po20is7mXTQORBv3bhSo664pasHItTwDz-KKI-FiIu_PYq0lYihuaedUUMp3MQTvDFulpFWEKzqseBDat07BholvxjzlnBK-Ez3KI9qGH8VIIk5TGW5pVu3cQe1WC8NJOe3xR9vu7XX6xvhVLPP7fvKiXJWJ_I_SagAhR1JW0uDJl_b0CrYYeVUnt_pzvW1BeJGz7ysCXcHlLBUh72XrpW-O7Q", + "e": "AQAB", + "kid": "123", + "qi": "9yk0mg4LY48YS8cvG51wMVfKfEjSbt2ygKxqabdsP-qSVpz-KVJtCmbKa57jm2BaMV_mRBQFodxu4XN58VGsj5MzXC5Jb_CkLeQfkp6ZKvehZhiJn3HF0Kb19u9xPvKDclHpKl-UMM1Pcu8Ww52DOyOYcHa1_SLZ05CcOWvMkS8", + "dp": "HYtToYeCSxVIE7W42hzZb1IXmwS3e1ok2fbbWwGL47CNPUU-UwQrBvrzwRqkwDcRc7opbV9yKLWGFohPgZ_onSPc3evyqcAUwfvptr8N96LhJgTtSB8tijYpilAZxCxQGuvoVBIJUFcjtsezN6Uhc5VtLEk7GphOKSrGEfnrOiU", + "dq": "tF2uf5v0JT-1DnazW4IWydQblqtlEfKKp3LX8W2egh7BNJ3XcA9UI1LdFAord2u1IXwq8YvZkgdyX3bVVNSmdb_SxIOxuMv4WF_tNry-eku-5iFCC7nqKC7U-rkRb19GIToAoPJSHImTQOJmXKcbQEV3eGDJHdLqpGQFRLdvl38", + "n": "zg12QaFTsez1EijOYRFzNZdowOt79ePqxCMQ-EEHynUhEZ6TIDnXfjWfuWocS1qRRglUUbHerEtmACUKPQShaG8uL0ZXiLqDr2QSuqrTtr2VUGesxZc6GiqkZlnWFNu5kSUvtemcKxWl8OLFf-5kNnGW4_4xM6BIwosYZnddfFqQT5IP6iTMZIUIKXxY4s1dadYRIiMteNutro67fhOLKabHkyC6ILE6f6VZsYbb_NXC5yC--7DiC2GYKzy7TKmaczuDfQZVgVY-nL9kTPIdhf334EYHQfYmLdvLc56g8-cxY3xh2GnwAj1JcT2u3hsS4KS05bUFHFnveO5uxIYKMQ" +} \ No newline at end of file diff --git a/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json new file mode 100644 index 000000000000..f4845de7459d --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json @@ -0,0 +1,12 @@ +{ + "p": "wvzuDSY6dIsIJB0UM5BIncN6ui5ee-KHpCmBhh_ia2iX3DluQODEgITw7gDATTDdQsBD-nJLjrqUs5g5Gmt0UgZucXQ5PCt1CK6dLEZCaLivw2fsHYvOKeTkdA49wqLkTc8pkfQs09N-b6NspDDqVJPFffBvFpR_IBFay-xKa5k", + "kty": "RSA", + "q": "sQzza69VkEmgUm50pEGjgu-OxugOrjcHrjQ42A23YVwAAJ90qPNQa62O7dv5oWmSX2PJ7TgjkzbvtTycLfT_vUeapwfCcJe4WoDg54xF3E35yBvBIwReRiavxf5nWsHEtd5kBg6wRIndGwGUBE91xaLg21spjH7nQKtG9vKeNM8", + "d": 
"UbiPIpr7agQqpM3ERfaXsKNMETyBrIYr3yoggHQ7XQkSPepCgRhE86puRmjf76FtZ3RwpJwjLfO6Ap0fIE9LXXE8otTF9sMnC9fe7odHkEu61Wr3aQM-53dgZoJL7XU53LOo0cNO44SBbw11d2cYlAR3KuCEK7bCLMBOkK1gdxVpgDC7DgxVgnP39bUlf4fA5gQeT5nNGnCWTV4jMVWCyEb0Ck5CvGJp1cCKaMSEvV4j6AM72EkAn8PogTSOJpurRJaTky0De7-ncT2Sv5DCuOIkMhsHqayLbm7a84ORHqsnWpZV85WVW-xxiivkVpqtSDRKCI94pMa9DWszjNJW8Q", + "e": "AQAB", + "kid": "sig-1642039368", + "qi": "CXP_tewCHyXk6PNDcbI0wtXsaWJryOJfMsc7roBCoOwDbTekUFXhOfRmFX5ZTNetRNDpw9nNiQDXt8pyw7UZ-0EhD1cLst1slS__hBi5QEAGo9cUxl3RGeMAFtY9O8B1gjFyKkG5BzdddGBKGQT3Tg23Eyzn6EA_NCw4XAKnkwQ", + "dp": "aAdzphZQN595n3LYNU50P59sWeqlRCkuvvnZ_coDDdUGuFr3pKuGix7iP8is0EISuitD2VmjUCnhbhP3202bCKwfvm4Inz58OT6X4mg1xBNMys8mHPla6-UPsY9rie1IKu8suY7xX65FlaA2NT9XtfoE8tUVH5HoZR59N7EAX3k", + "dq": "mTkZDO-fgBCH4-7dmS2JIY7KpI897T2IsxVUwH4WXvastd1Jq9FuntGEKYu_HRbtawpEPbzg5M2dY97BVvB5xshKKhWIC8Lx87knapw19XOyIKEMY46rO9DNO-9waNXatH5zV96sY5RgOrgB7j0KMnFEYfIiIgnNfmT8NElB63c", + "n": "htq92ltGQrZv19TlhluoqmXjjRXw_NWEd0nPZsWrbLnr8lZ-gOxsjIsDMjb5HNDNmuAS7pg2d_o5ZZAY1sSjKf_EuUPZN-MOej8ZBOtrMxEH7e_t37kYIbbJSuzt55poZdRli6BE8CVDesS4W-wsFZ0MvUazAUADh3onARN7Arf3jwknm5CLafE_JzKrNKZadBElEFEAEu5y9n_SuTlemw3P81lOVmZmjGjfqtPx01O5aV_truMjrQa3NUivu1ihrjvJl0xc3rwJe7qDrfEqgvpBQ-vrAsvg3Jiz5Idj6cU3J0hNtV4ixYxcDQecNlgR7gBeIp3E8BXL1kGOOHYUtw" +} \ No newline at end of file diff --git a/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json new file mode 100644 index 000000000000..a6fd935a0a3b --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json @@ -0,0 +1,20 @@ +{ + "keys": [ + { + "kty": "RSA", + "e": "AQAB", + "alg": "RS256", + "kid": "819d1e61429dd3d3caef129c0ac2bae8c6d46fbc", + "use": "sig", + "n": "qfR12Bcs_hSL0Y1fN5TYZeUQIFmuVRYa210na81BFj91xxwtICJY6ckZCI3Jf0v2tPLOT_iKVk4WBCZ7AVJVvZqHuttkyrdFROMVTe6DwmcjbbkgACMVildTnHy9xy2KuX-OZsEYzgHuRgfe_Y-JN6LoxBYZx6VoBLpgK-F0Q-0O_bRgZhHifVG4ZzARjhgz0PvBb700GtOTHS6mQIfToPErbgqcowKN9k-mJqJr8xpXSHils-Yw97LHjICZmvA5B8EPNW28DwFOE5JrsPcyrFKOAYl4NcSYQgjl-17TWE5_tFdZ8Lz-srjiPMoHlBjZD1C7aO03LI-_9u8lVsktMw" + }, + { + "kty": "RSA", + "e": "AQAB", + "alg": "RS256", + "kid": "123", + "use": "sig", + "n": "zg12QaFTsez1EijOYRFzNZdowOt79ePqxCMQ-EEHynUhEZ6TIDnXfjWfuWocS1qRRglUUbHerEtmACUKPQShaG8uL0ZXiLqDr2QSuqrTtr2VUGesxZc6GiqkZlnWFNu5kSUvtemcKxWl8OLFf-5kNnGW4_4xM6BIwosYZnddfFqQT5IP6iTMZIUIKXxY4s1dadYRIiMteNutro67fhOLKabHkyC6ILE6f6VZsYbb_NXC5yC--7DiC2GYKzy7TKmaczuDfQZVgVY-nL9kTPIdhf334EYHQfYmLdvLc56g8-cxY3xh2GnwAj1JcT2u3hsS4KS05bUFHFnveO5uxIYKMQ" + } + ] +} \ No newline at end of file diff --git a/standalone-metastore/metastore-rest-catalog/src/test/resources/log4j2.properties b/standalone-metastore/metastore-rest-catalog/src/test/resources/log4j2.properties new file mode 100644 index 000000000000..7d592ef2df94 --- /dev/null +++ b/standalone-metastore/metastore-rest-catalog/src/test/resources/log4j2.properties @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +name=PropertiesConfig +property.filename = logs +appenders = console + +appender.console.type = Console +appender.console.name = STDOUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %c{5} - %msg%n + +rootLogger.level = INFO +rootLogger.appenderRefs = stdout +rootLogger.appenderRef.stdout.ref = STDOUT + +loggers = HttpClient, JettyHttpServer + +logger.HttpClient.name = org.apache.http.client +logger.HttpClient.level = INFO + +logger.JettyHttpServer.name = org.eclipse.jetty.server +logger.JettyHttpServer.level = INFO + diff --git a/standalone-metastore/metastore-server/pom.xml b/standalone-metastore/metastore-server/pom.xml index 83f0c50894b0..4b818e0d9584 100644 --- a/standalone-metastore/metastore-server/pom.xml +++ b/standalone-metastore/metastore-server/pom.xml @@ -413,7 +413,7 @@ com.github.tomakehurst wiremock-jre8-standalone - 2.32.0 + ${wiremock.jre8.standalone.version} test diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 986bb2e4e848..6cd45e34aff1 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; +import java.lang.reflect.Method; import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; import org.apache.commons.cli.OptionBuilder; @@ -94,6 +95,7 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import javax.servlet.Servlet; import javax.servlet.ServletRequestEvent; import javax.servlet.ServletRequestListener; /** @@ -119,13 +121,39 @@ public class HiveMetaStore extends ThriftHiveMetastore { private static ZooKeeperHiveHelper zooKeeperHelper = null; private static String msHost = null; private static ThriftServer thriftServer; - private static Server propertyServer = null; + /** the servlet server. */ + private static Server servletServer = null; + /** the port and path of the property servlet. */ + private static int propertyServletPort = -1; + /** the port and path of the catalog servlet. */ + private static int catalogServletPort = -1; - - public static Server getPropertyServer() { - return propertyServer; + /** + * Gets the embedded servlet server. + * @return the server instance or null + */ + public static Server getServletServer() { + return servletServer; } + /** + * Gets the property servlet connector port. + *

<p>If configuration is 0, this port is allocated by the system.</p>

+ * @return the connector port or -1 if not configured + */ + public static int getPropertyServletPort() { + return propertyServletPort; + } + + /** + * Gets the catalog servlet connector port. + *

<p>If configuration is 0, this port is allocated by the system.</p>

+ * @return the connector port or -1 if not configured + */ + public static int getCatalogServletPort() { + return catalogServletPort; + } + public static boolean isRenameAllowed(Database srcDB, Database destDB) { if (!srcDB.getName().equalsIgnoreCase(destDB.getName())) { if (ReplChangeManager.isSourceOfReplication(srcDB) || ReplChangeManager.isSourceOfReplication(destDB)) { @@ -309,6 +337,15 @@ public static void main(String[] args) throws Throwable { if (isCliVerbose) { System.err.println(shutdownMsg); } + // servlet server + if (servletServer != null) { + try { + servletServer.stop(); + } catch (Exception e) { + LOG.error("Error stopping Property Map server.", e); + } + } + // metrics if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) { try { Metrics.shutdown(); @@ -379,7 +416,7 @@ private static ThriftServer startHttpMetastore(int port, Configuration conf) throws Exception { LOG.info("Attempting to start http metastore server on port: {}", port); // login principal if security is enabled - ServletSecurity.loginServerPincipal(conf); + ServletSecurity.loginServerPrincipal(conf); long maxMessageSize = MetastoreConf.getLongVar(conf, ConfVars.SERVER_MAX_MESSAGE_SIZE); int minWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MIN_THREADS); @@ -445,7 +482,9 @@ public void setThreadFactory(ThreadFactory threadFactory) { IHMSHandler handler = HMSHandlerProxyFactory.getProxy(conf, baseHandler, false); processor = new ThriftHiveMetastore.Processor<>(handler); LOG.info("Starting DB backed MetaStore Server with generic processor"); - TServlet thriftHttpServlet = new HmsThriftHttpServlet(processor, protocolFactory, conf); + boolean jwt = MetastoreConf.getVar(conf, ConfVars.THRIFT_METASTORE_AUTHENTICATION).equalsIgnoreCase("jwt"); + ServletSecurity security = new ServletSecurity(conf, jwt); + Servlet thriftHttpServlet = security.proxy(new TServlet(processor, protocolFactory)); boolean directSqlEnabled = MetastoreConf.getBoolVar(conf, ConfVars.TRY_DIRECT_SQL); HMSHandler.LOG.info("Direct SQL optimization = {}", directSqlEnabled); @@ -730,16 +769,49 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge, throw e; } } - // optionally create and start the property server and servlet - propertyServer = PropertyServlet.startServer(conf); + // optionally create and start the property and Iceberg REST server + ServletServerBuilder builder = new ServletServerBuilder(conf); + ServletServerBuilder.Descriptor properties = builder.addServlet(PropertyServlet.createServlet(conf)); + ServletServerBuilder.Descriptor catalog = builder.addServlet(createIcebergServlet(conf)); + servletServer = builder.start(LOG); + if (servletServer != null) { + if (properties != null) { + propertyServletPort = properties.getPort(); + } + if (catalog != null) { + catalogServletPort = catalog.getPort(); + } + } + + // main server thriftServer.start(); } + + /** + * Creates the Iceberg REST catalog servlet descriptor. 
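+ * <p>The factory class is loaded through reflection, so the metastore server has no compile-time dependency
+ * on the Iceberg REST catalog module; as the reflective call below expects, the factory must expose a
+ * {@code public static ServletServerBuilder.Descriptor createServlet(Configuration)} method. When the class
+ * is not on the classpath, the servlet is simply not started.</p>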
+ * @param configuration the configuration + * @return the servlet descriptor (can be null) + */ + static ServletServerBuilder.Descriptor createIcebergServlet(Configuration configuration) { + try { + String className = MetastoreConf.getVar(configuration, ConfVars.ICEBERG_CATALOG_SERVLET_FACTORY); + Class iceClazz = Class.forName(className); + Method iceStart = iceClazz.getMethod("createServlet", Configuration.class); + return (ServletServerBuilder.Descriptor) iceStart.invoke(null, configuration); + } catch (ClassNotFoundException xnf) { + LOG.warn("Unable to start Iceberg REST Catalog server, missing jar?", xnf); + return null; + } catch (Exception e) { + LOG.error("Unable to start Iceberg REST Catalog server", e); + return null; + } + } /** * @param port where metastore server is running * @return metastore server instance URL. If the metastore server was bound to a configured - * host, return that appended by port. Otherwise return the externally visible URL of the local + * host, return that appended by port. Otherwise, return the externally visible URL of the local * host with the given port * @throws Exception */ diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HmsThriftHttpServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HmsThriftHttpServlet.java deleted file mode 100644 index 4572f86e0247..000000000000 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HmsThriftHttpServlet.java +++ /dev/null @@ -1,57 +0,0 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore; - -import java.io.IOException; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; - -import org.apache.thrift.TProcessor; -import org.apache.thrift.protocol.TProtocolFactory; -import org.apache.thrift.server.TServlet; - -/* -Servlet class used by HiveMetastore server when running in HTTP mode. -If JWT auth is enabled, then the servlet is also responsible for validating -JWTs sent in the Authorization header in HTTP request. 
- */ -public class HmsThriftHttpServlet extends TServlet { - private final ServletSecurity security; - - public HmsThriftHttpServlet(TProcessor processor, - TProtocolFactory protocolFactory, Configuration conf) { - super(processor, protocolFactory); - boolean jwt = MetastoreConf.getVar(conf,ConfVars.THRIFT_METASTORE_AUTHENTICATION).equalsIgnoreCase("jwt"); - security = new ServletSecurity(conf, jwt); - } - - public void init() throws ServletException { - super.init(); - security.init(); - } - - @Override - protected void doPost(HttpServletRequest request, - HttpServletResponse response) throws ServletException, IOException { - security.execute(request, response, (q,a)->super.doPost(q,a)); - } -} diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java index 1d6cc9d6ade1..9437d2558f8a 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java @@ -29,11 +29,6 @@ import org.apache.hadoop.hive.metastore.properties.PropertyMap; import org.apache.hadoop.hive.metastore.properties.PropertyStore; import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.servlet.ServletHandler; -import org.eclipse.jetty.servlet.ServletHolder; -import org.eclipse.jetty.servlet.Source; -import org.eclipse.jetty.util.ssl.SslContextFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,15 +59,16 @@ public class PropertyServlet extends HttpServlet { public static final Logger LOGGER = LoggerFactory.getLogger(PropertyServlet.class); /** The configuration. */ private final Configuration configuration; - /** The security. */ - private final ServletSecurity security; PropertyServlet(Configuration configuration) { - String auth = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_AUTH); - boolean jwt = auth != null && "jwt".equals(auth.toLowerCase()); - this.security = new ServletSecurity(configuration, jwt); this.configuration = configuration; } + + @Override + public String getServletName() { + return "HMS Property"; + } + private String strError(String msg, Object...args) { return String.format(PTYERROR + msg, args); } @@ -136,18 +132,13 @@ private void writeJson(HttpServletResponse response, Object value) throws IOExce writer.flush(); } + @Override public void init() throws ServletException { super.init(); - security.init(); } @Override protected void doPost(HttpServletRequest request, - HttpServletResponse response) throws ServletException, IOException { - security.execute(request, response, PropertyServlet.this::runPost); - } - - private void runPost(HttpServletRequest request, HttpServletResponse response) throws ServletException { final RawStore ms = getMS(); final String ns = getNamespace(request.getRequestURI()); @@ -171,44 +162,12 @@ private void runPost(HttpServletRequest request, switch (method) { // fetch a list of qualified keys by name case "fetchProperties": { - // one or many keys - Object jsonKeys = call.get("keys"); - if (jsonKeys == null) { - throw new IllegalArgumentException("null keys"); - } - Iterable keys = jsonKeys instanceof List - ? 
(List) jsonKeys
- : Collections.singletonList(jsonKeys);
- Map properties = new TreeMap<>();
- for (Object okey : keys) {
- String key = okey.toString();
- String value = mgr.exportPropertyValue(key);
- if (value != null) {
- properties.put(key, value);
- }
- }
- reactions.add(properties);
+ fetchProperties(mgr, call, reactions);
break;
}
// select a list of qualified keys by prefix/predicate/selection
case "selectProperties": {
- String prefix = (String) call.get("prefix");
- if (prefix == null) {
- throw new IllegalArgumentException("null prefix");
- }
- String predicate = (String) call.get("predicate");
- // selection may be null, a sole property or a list
- Object selection = call.get("selection");
- @SuppressWarnings("unchecked") List project =
- selection == null
- ? null
- : selection instanceof List
- ? (List) selection
- : Collections.singletonList(selection.toString());
- Map selected = mgr.selectProperties(prefix, predicate, project);
- Map> returned = new TreeMap<>();
- selected.forEach((k, v) -> returned.put(k, v.export(project == null)));
- reactions.add(returned);
+ selectProperties(mgr, call, reactions);
break;
}
case "script": { @@ -221,7 +180,7 @@ private void runPost(HttpServletRequest request, break;
}
default: {
- throw new IllegalArgumentException("bad argument type " + action.getClass());
+ throw new IllegalArgumentException("Bad argument type " + action.getClass());
}
}
} @@ -242,26 +201,49 @@ private void runPost(HttpServletRequest request, }
}
-// A way to import values using files sent over http
-// private void importProperties(HttpServletRequest request) throws ServletException, IOException {
-// List fileParts = request.getParts().stream()
-// .filter(part -> "files".equals(part.getName()) && part.getSize() > 0)
-// .collect(Collectors.toList()); // Retrieves
-//
-// for (Part filePart : fileParts) {
-// String fileName = Paths.get(filePart.getSubmittedFileName()).getFileName().toString(); // MSIE fix.
-// InputStream fileContent = filePart.getInputStream();
-// // ... (do your job here)
-// }
-// }
+ private static void fetchProperties(PropertyManager mgr, Map<String, Object> call, List<Object> reactions) {
+ // one or many keys
+ Object jsonKeys = call.get("keys");
+ if (jsonKeys == null) {
+ throw new IllegalArgumentException("null keys");
+ }
+ Iterable<?> keys = jsonKeys instanceof List
+ ? (List<?>) jsonKeys
+ : Collections.singletonList(jsonKeys);
+ Map<String, String> properties = new TreeMap<>();
+ for (Object okey : keys) {
+ String key = okey.toString();
+ String value = mgr.exportPropertyValue(key);
+ if (value != null) {
+ properties.put(key, value);
+ }
+ }
+ reactions.add(properties);
+ }
+
+ private static void selectProperties(PropertyManager mgr, Map<String, Object> call, List<Object> reactions) {
+ String prefix = (String) call.get("prefix");
+ if (prefix == null) {
+ throw new IllegalArgumentException("null prefix");
+ }
+ String predicate = (String) call.get("predicate");
+ // selection may be null, a sole property or a list
+ Object selection = call.get("selection");
+ @SuppressWarnings("unchecked") List<String> project =
+ selection == null
+ ? null
+ : selection instanceof List
+ ?
(List<String>) selection
+ : Collections.singletonList(selection.toString());
+ Map<String, PropertyMap> selected = mgr.selectProperties(prefix, predicate, project);
+ Map<String, Map<String, String>> returned = new TreeMap<>();
+ selected.forEach((k, v) -> returned.put(k, v.export(project == null)));
+ reactions.add(returned);
+ }
@Override protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
- security.execute(request, response, PropertyServlet.this::runPut);
- }
- private void runPut(HttpServletRequest request,
- HttpServletResponse response) throws ServletException {
final String ns = getNamespace(request.getRequestURI());
final RawStore ms = getMS();
try { @@ -294,11 +276,6 @@ private void runPut(HttpServletRequest request, @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
- security.execute(request, response, PropertyServlet.this::runGet);
- }
-
- private void runGet(HttpServletRequest request,
- HttpServletResponse response) throws ServletException {
final String ns = getNamespace(request.getRequestURI());
final RawStore ms = getMS();
try { @@ -331,42 +308,30 @@ private void runGet(HttpServletRequest request, }
}
+ public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) {
+ try {
+ int port = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
+ String path = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
+ if (port >= 0 && path != null && !path.isEmpty()) {
+ ServletSecurity security = new ServletSecurity(configuration);
+ HttpServlet servlet = security.proxy(new PropertyServlet(configuration));
+ return new ServletServerBuilder.Descriptor(port, path, servlet);
+ }
+ } catch (Exception io) {
+ LOGGER.error("Failed to create servlet", io);
+ }
+ return null;
+ }
+
+ /**
+ * Convenience method to start an http server that only serves this servlet.
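+ * <p>For example, a test can spin up only this servlet (a sketch, assuming the servlet port and path
+ * are set in the configuration):</p>
+ * <pre>{@code
+ * Server server = PropertyServlet.startServer(conf);
+ * // ... exercise the property endpoint ...
+ * server.stop();
+ * }</pre>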
+ * * @param conf the configuration * @return the server instance * @throws Exception if servlet initialization fails */ public static Server startServer(Configuration conf) throws Exception { - // no port, no server - int port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT); - if (port < 0) { - return null; - } - String cli = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH); - // HTTP Server - Server server = new Server(); - server.setStopAtShutdown(true); - - // Optional SSL - final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(conf); - final ServerConnector connector = new ServerConnector(server, sslContextFactory); - connector.setPort(port); - connector.setReuseAddress(true); - server.addConnector(connector); - - // Hook the servlet - ServletHandler handler = new ServletHandler(); - server.setHandler(handler); - ServletHolder holder = handler.newServletHolder(Source.EMBEDDED); - holder.setServlet(new PropertyServlet(conf)); // - handler.addServletWithMapping(holder, "/"+cli+"/*"); - server.start(); - if (!server.isStarted()) { - LOGGER.error("unable to start property-maps servlet server, path {}, port {}", cli, port); - } else { - LOGGER.info("started property-maps servlet server on {}", server.getURI()); - } - return server; + return ServletServerBuilder.startServer(LOGGER, conf, PropertyServlet::createServlet); } + } diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java index 76181722ca85..d0d48b04df75 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java @@ -33,9 +33,11 @@ import org.slf4j.LoggerFactory; import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; +import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.Arrays; import java.util.Enumeration; @@ -43,27 +45,67 @@ /** * Secures servlet processing. + *

<p>This is to be used by servlets that require impersonation through {@link UserGroupInformation#doAs(PrivilegedAction)}
+ * method when providing service. The servlet request header provides user identification
+ * that Hadoop{@literal '}s security uses to perform actions, the
+ * {@link ServletSecurity#execute(HttpServletRequest, HttpServletResponse, ServletSecurity.MethodExecutor)}
+ * method invokes the executor through a {@link PrivilegedAction} in the expected {@link UserGroupInformation} context.</p>
+ * <p>A typical usage in a servlet is the following:</p>
+ * <pre>
+ * ServletSecurity security; // ...
+ * {@literal @}Override protected void doPost(HttpServletRequest request, HttpServletResponse response)
+ *    throws ServletException, IOException {
+ *   security.execute(request, response, this::runPost);
+ * }
+ * private void runPost(HttpServletRequest request, HttpServletResponse response) throws ServletException {
+ *   ...
+ * }
+ * </pre>
+ * <p>As a convenience, instead of embedding the security instance, one can wrap an existing servlet in a proxy that
+ * will ensure all its service methods are called with the expected {@link UserGroupInformation}.</p>
+ * <pre>
+ * HttpServlet myServlet = ...;
+ * ServletSecurity security = ...;
+ * Servlet ugiServlet = security.proxy(myServlet);
+ * </pre>
+ * <p>This implementation performs user extraction and optional JWT validation to
+ * execute (servlet service) methods within the context of the retrieved UserGroupInformation.</p>
*/ public class ServletSecurity { private static final Logger LOG = LoggerFactory.getLogger(ServletSecurity.class); static final String X_USER = MetaStoreUtils.USER_NAME_HTTP_HEADER; private final boolean isSecurityEnabled; private final boolean jwtAuthEnabled; - private JWTValidator jwtValidator = null; private final Configuration conf; + private JWTValidator jwtValidator = null; - ServletSecurity(Configuration conf, boolean jwt) { + public ServletSecurity(Configuration conf) { + this(conf, isAuthJwt(conf)); + } + + public ServletSecurity(Configuration conf, boolean jwt) { this.conf = conf; this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled(); this.jwtAuthEnabled = jwt; } + public static boolean isAuthJwt(Configuration configuration) { + String auth = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_AUTH); + return "jwt".equalsIgnoreCase(auth); + } + /** * Should be called in Servlet.init() * @throws ServletException if the jwt validator creation throws an exception */ public void init() throws ServletException { - if (jwtAuthEnabled) { + if (jwtAuthEnabled && jwtValidator == null) { try { jwtValidator = new JWTValidator(this.conf); } catch (Exception e) { @@ -73,11 +115,73 @@ public void init() throws ServletException { } } + /** + * Proxy a servlet instance service through this security executor. + */ + public class ProxyServlet extends HttpServlet { + private final HttpServlet delegate; + + ProxyServlet(HttpServlet delegate) { + this.delegate = delegate; + } + + @Override + public void init() throws ServletException { + ServletSecurity.this.init(); + delegate.init(); + } + + @Override + public void service(HttpServletRequest request, HttpServletResponse response) throws IOException { + execute(request, response, delegate::service); + } + + @Override + public String getServletName() { + try { + return delegate.getServletName(); + } catch (IllegalStateException ill) { + return delegate.toString(); + } + } + + @Override + public String getServletInfo() { + return delegate.getServletInfo(); + } + } + + /** + * Creates a proxy servlet. + * @param servlet the servlet to serve within this security context + * @return a servlet instance or null if security initialization fails + */ + public HttpServlet proxy(HttpServlet servlet) { + try { + init(); + } catch (ServletException e) { + LOG.error("Unable to proxy security for servlet {}", servlet.toString(), e); + return null; + } + return new ProxyServlet(servlet); + } + /** * Any http method executor. + *

<p>A method whose signature is similar to
+ * {@link HttpServlet#doPost(HttpServletRequest, HttpServletResponse)},
+ * {@link HttpServlet#doGet(HttpServletRequest, HttpServletResponse)},
+ * etc.</p>
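+ * <p>Implementations are typically method references, e.g. {@code this::runPost} in the example above or
+ * {@code delegate::service} in the proxy.</p>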

*/ @FunctionalInterface - interface MethodExecutor { + public interface MethodExecutor { + /** + * The method to call to secure the execution of a (http) method. + * @param request the request + * @param response the response + * @throws ServletException if the method executor fails + * @throws IOException if the Json in/out fail + */ void execute(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException; } @@ -86,13 +190,12 @@ interface MethodExecutor { * @param request the request * @param response the response * @param executor the method executor - * @throws ServletException if the method executor fails * @throws IOException if the Json in/out fail */ public void execute(HttpServletRequest request, HttpServletResponse response, MethodExecutor executor) - throws ServletException, IOException { + throws IOException { if (LOG.isDebugEnabled()) { - LOG.debug("Logging headers in "+request.getMethod()+" request"); + LOG.debug("Logging headers in {} request", request.getMethod()); Enumeration headerNames = request.getHeaderNames(); while (headerNames.hasMoreElements()) { String headerName = headerNames.nextElement(); @@ -100,9 +203,9 @@ public void execute(HttpServletRequest request, HttpServletResponse response, Me request.getHeader(headerName)); } } + final UserGroupInformation clientUgi; try { String userFromHeader = extractUserName(request, response); - UserGroupInformation clientUgi; // Temporary, and useless for now. Here only to allow this to work on an otherwise kerberized // server. if (isSecurityEnabled || jwtAuthEnabled) { @@ -112,25 +215,25 @@ public void execute(HttpServletRequest request, HttpServletResponse response, Me LOG.info("Creating remote user for: {}", userFromHeader); clientUgi = UserGroupInformation.createRemoteUser(userFromHeader); } - PrivilegedExceptionAction action = () -> { - executor.execute(request, response); - return null; - }; - try { - clientUgi.doAs(action); - } catch (InterruptedException e) { - LOG.error("Exception when executing http request as user: " + clientUgi.getUserName(), e); - Thread.currentThread().interrupt(); - } catch (RuntimeException e) { - LOG.error("Exception when executing http request as user: " + clientUgi.getUserName(), - e); - throw new ServletException(e); - } } catch (HttpAuthenticationException e) { response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); response.getWriter().println("Authentication error: " + e.getMessage()); // Also log the error message on server side LOG.error("Authentication error: ", e); + // no need to go further + return; + } + final PrivilegedExceptionAction action = () -> { + executor.execute(request, response); + return null; + }; + try { + clientUgi.doAs(action); + } catch (InterruptedException e) { + LOG.info("Interrupted when executing http request as user: {}", clientUgi.getUserName(), e); + Thread.currentThread().interrupt(); + } catch (RuntimeException e) { + throw new IOException("Exception when executing http request as user: "+ clientUgi.getUserName(), e); } } @@ -178,7 +281,7 @@ private String extractBearerToken(HttpServletRequest request, * @param conf the configuration * @throws IOException if getting the server principal fails */ - static void loginServerPincipal(Configuration conf) throws IOException { + static void loginServerPrincipal(Configuration conf) throws IOException { // This check is likely pointless, especially with the current state of the http // servlet which respects whatever comes in. 
Putting this in place for the moment // only to enable testing on an otherwise secure cluster. @@ -193,6 +296,7 @@ static void loginServerPincipal(Configuration conf) throws IOException { LOG.info("Security is not enabled. Not logging in via keytab"); } } + /** * Creates an SSL context factory if configuration states so. * @param conf the configuration @@ -204,24 +308,28 @@ static SslContextFactory createSslContextFactory(Configuration conf) throws IOEx if (!useSsl) { return null; } - String keyStorePath = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_PATH).trim(); + final String keyStorePath = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_PATH).trim(); if (keyStorePath.isEmpty()) { - throw new IllegalArgumentException(MetastoreConf.ConfVars.SSL_KEYSTORE_PATH.toString() + throw new IllegalArgumentException(MetastoreConf.ConfVars.SSL_KEYSTORE_PATH + " Not configured for SSL connection"); } - String keyStorePassword = + final String keyStorePassword = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_PASSWORD); - String keyStoreType = + final String keyStoreType = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_TYPE).trim(); - String keyStoreAlgorithm = + final String keyStoreAlgorithm = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_KEYMANAGERFACTORY_ALGORITHM).trim(); - + final String[] excludedProtocols = + MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_PROTOCOL_BLACKLIST).split(","); + if (LOG.isInfoEnabled()) { + LOG.info("HTTP Server SSL: adding excluded protocols: {}", Arrays.toString(excludedProtocols)); + } SslContextFactory factory = new SslContextFactory.Server(); - String[] excludedProtocols = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_PROTOCOL_BLACKLIST).split(","); - LOG.info("HTTP Server SSL: adding excluded protocols: " + Arrays.toString(excludedProtocols)); factory.addExcludeProtocols(excludedProtocols); - LOG.info("HTTP Server SSL: SslContextFactory.getExcludeProtocols = " - + Arrays.toString(factory.getExcludeProtocols())); + if (LOG.isInfoEnabled()) { + LOG.info("HTTP Server SSL: SslContextFactory.getExcludeProtocols = {}", + Arrays.toString(factory.getExcludeProtocols())); + } factory.setKeyStorePath(keyStorePath); factory.setKeyStorePassword(keyStorePassword); factory.setKeyStoreType(keyStoreType); diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java new file mode 100644 index 000000000000..7323845ae35f --- /dev/null +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java @@ -0,0 +1,347 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.hadoop.hive.metastore; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.handler.ContextHandlerCollection; +import org.eclipse.jetty.server.handler.HandlerCollection; +import org.eclipse.jetty.server.handler.gzip.GzipHandler; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.eclipse.jetty.util.ssl.SslContextFactory; +import org.eclipse.jetty.util.thread.QueuedThreadPool; +import org.slf4j.Logger; + +import javax.servlet.Servlet; +import javax.servlet.http.HttpServlet; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; + +/** + * Helper class to ease creation of embedded Jetty serving servlets on + * different ports. + */ +public class ServletServerBuilder { + /** + * The configuration instance. + */ + private final Configuration configuration; + /** + * Keeping track of descriptors. + */ + private final Map descriptorsMap = new IdentityHashMap<>(); + + /** + * Creates a builder instance. + * + * @param conf the configuration + */ + public ServletServerBuilder(Configuration conf) { + this.configuration = conf; + } + + /** + * Creates a builder. + * + * @param conf the configuration + * @param describe the functions to call that create servlet descriptors + * @return the builder or null if no descriptors + */ + @SafeVarargs + public static ServletServerBuilder builder(Configuration conf, + Function... describe) { + List descriptors = new ArrayList<>(); + Arrays.asList(describe).forEach(functor -> { + ServletServerBuilder.Descriptor descriptor = functor.apply(conf); + if (descriptor != null) { + descriptors.add(descriptor); + } + }); + if (!descriptors.isEmpty()) { + ServletServerBuilder builder = new ServletServerBuilder(conf); + descriptors.forEach(builder::addServlet); + return builder; + } + return null; + } + + /** + * Helper for generic use case. + * + * @param logger the logger + * @param conf the configuration + * @param describe the functions to create descriptors + * @return a server instance + */ + @SafeVarargs + public static Server startServer( + Logger logger, + Configuration conf, + Function... describe) { + return Objects.requireNonNull(builder(conf, describe)).start(logger); + } + + public Configuration getConfiguration() { + return configuration; + } + + /** + * Adds a servlet instance. + *

The servlet port can be shared between servlets; if 0, the system will provide + * a port. If the port is negative, the system will provide a dedicated (i.e. non-shared) + * port for the servlet.
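A short usage sketch of these semantics; the configuration and servlet instances are placeholders:

```java
import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.ServletServerBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.eclipse.jetty.server.Server;

class PortSharingSketch {
  static Server start(HttpServlet one, HttpServlet two, HttpServlet solo) throws Exception {
    Configuration conf = MetastoreConf.newMetastoreConf();
    ServletServerBuilder builder = new ServletServerBuilder(conf);
    // port 0: these two servlets end up sharing one system-chosen port
    ServletServerBuilder.Descriptor d1 = builder.addServlet(0, "one", one);
    ServletServerBuilder.Descriptor d2 = builder.addServlet(0, "two", two);
    // negative port: this servlet gets its own dedicated system-chosen port
    ServletServerBuilder.Descriptor d3 = builder.addServlet(-1, "solo", solo);
    Server server = builder.startServer();
    // after start: d1.getPort() == d2.getPort(), d3.getPort() differs
    return server;
  }
}
```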

+ * + * @param port the servlet port + * @param path the servlet path + * @param servlet a servlet instance + * @return a descriptor + */ + public Descriptor addServlet(int port, String path, HttpServlet servlet) { + Descriptor descriptor = new Descriptor(port, path, servlet); + return addServlet(descriptor); + } + + /** + * Adds a servlet instance. + * + * @param descriptor a descriptor + * @return the descriptor + */ + public Descriptor addServlet(Descriptor descriptor) { + if (descriptor != null) { + descriptorsMap.put(descriptor.getServlet(), descriptor); + } + return descriptor; + } + + /** + * Creates a server instance. + *

Uses the configuration to determine the thread-pool constants (minimum and maximum thread counts, idle timeout).
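A sketch of sizing the pool before building the server, assuming the generic MetastoreConf.setLongVar setter applies to these variables as it does elsewhere in this patch; values are illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

class ThreadPoolTuningSketch {
  static Configuration tunedConf() {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // overrides for the HTTPSERVER_THREADPOOL_* defaults read by createServer()
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MIN, 4);
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MAX, 64);
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_IDLE, 30_000);
    return conf;
  }
}
```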

+ * + * @return the server instance + */ + private Server createServer() { + final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MAX); + final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MIN); + final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_IDLE); + final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout); + Server server = new Server(threadPool); + server.setStopAtShutdown(true); + return server; + } + + /** + * Creates a server instance and a connector on a given port. + * + * @param server the server instance + * @param sslContextFactory the ssl factory + * @param port the port + * @return the server connector listening to the port + */ + private ServerConnector createConnector(Server server, SslContextFactory sslContextFactory, int port) { + final ServerConnector connector = new ServerConnector(server, sslContextFactory); + connector.setPort(port); + connector.setReuseAddress(true); + HttpConnectionFactory httpFactory = connector.getConnectionFactory(HttpConnectionFactory.class); + // do not leak information + if (httpFactory != null) { + HttpConfiguration httpConf = httpFactory.getHttpConfiguration(); + httpConf.setSendServerVersion(false); + httpConf.setSendXPoweredBy(false); + } + return connector; + } + + /** + * Adds a servlet to its intended servlet context. + * + * @param handlersMap the map of port to handlers + * @param descriptor the servlet descriptor + */ + private void addServlet(Map handlersMap, Descriptor descriptor) { + final int port = descriptor.getPort(); + final String path = descriptor.getPath(); + final HttpServlet servlet = descriptor.getServlet(); + // if port is < 0, use one for this servlet only + int key = port < 0 ? -1 - handlersMap.size() : port; + ServletContextHandler handler = handlersMap.computeIfAbsent(key, p -> { + ServletContextHandler servletHandler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS); + servletHandler.setContextPath("/"); + servletHandler.setGzipHandler(new GzipHandler()); + return servletHandler; + }); + ServletHolder servletHolder = new ServletHolder(servlet); + servletHolder.setInitParameter("javax.ws.rs.Application", "ServiceListPublic"); + handler.addServlet(servletHolder, "/" + path + "/*"); + } + + /** + * Convenience method to start an HTTP server that serves all configured + * servlets.
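Together with the static startServer(Logger, Configuration, Function...) helper shown earlier, descriptor factories compose into a one-call start; a factory may return null to opt its servlet out. A sketch, reusing the HelloServlet test servlet introduced later in this patch:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.ServletServerBuilder;
import org.eclipse.jetty.server.Server;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class StartServerSketch {
  private static final Logger LOG = LoggerFactory.getLogger(StartServerSketch.class);

  static Server start(Configuration conf) {
    // each function maps the configuration to a Descriptor, or null to opt out
    return ServletServerBuilder.startServer(LOG, conf,
        c -> new ServletServerBuilder.Descriptor(0, "one", new HelloServlet("ONE")),
        c -> null /* e.g. a servlet disabled by configuration */);
  }
}
```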
+ * + * @return the server instance or null if no servlet was configured + * @throws Exception if servlet initialization fails + */ + public Server startServer() throws Exception { + // add all servlets + Map handlersMap = new HashMap<>(); + for (Descriptor descriptor : descriptorsMap.values()) { + addServlet(handlersMap, descriptor); + } + final int size = handlersMap.size(); + if (size == 0) { + return null; + } + final Server server = createServer(); + // create the connectors + final SslContextFactory sslFactory = ServletSecurity.createSslContextFactory(configuration); + final ServerConnector[] connectors = new ServerConnector[size]; + final ServletContextHandler[] handlers = new ServletContextHandler[size]; + Iterator> it = handlersMap.entrySet().iterator(); + for (int c = 0; it.hasNext(); ++c) { + Map.Entry entry = it.next(); + int key = entry.getKey(); + int port = Math.max(key, 0); + ServerConnector connector = createConnector(server, sslFactory, port); + connectors[c] = connector; + ServletContextHandler handler = entry.getValue(); + handlers[c] = handler; + // make each servlet context be served only by its dedicated connector + String host = "hms" + c; + connector.setName(host); + handler.setVirtualHosts(new String[]{"@" + host}); + } + // hook the connectors and the handlers + server.setConnectors(connectors); + HandlerCollection portHandler = new ContextHandlerCollection(); + portHandler.setHandlers(handlers); + server.setHandler(portHandler); + // start the server + server.start(); + // collect automatically assigned connector ports + for (int i = 0; i < connectors.length; ++i) { + int port = connectors[i].getLocalPort(); + ServletContextHandler handler = handlers[i]; + ServletHolder[] holders = handler.getServletHandler().getServlets(); + for (ServletHolder holder : holders) { + Servlet servlet = holder.getServletInstance(); + if (servlet != null) { + Descriptor descriptor = descriptorsMap.get(servlet); + if (descriptor != null) { + descriptor.setPort(port); + } + } + } + } + return server; + } + + /** + * Creates and starts the server. + * + * @param logger a logger to output info + * @return the server instance (or null if error) + */ + public Server start(Logger logger) { + try { + Server server = startServer(); + if (server != null) { + if (!server.isStarted()) { + logger.error("Unable to start servlet server on {}", server.getURI()); + } else { + descriptorsMap.values().forEach(descriptor -> logger.info("Started {} servlet on {}:{}", + descriptor.toString(), + descriptor.getPort(), + descriptor.getPath())); + } + } + return server; + } catch (Throwable throwable) { + logger.error("Unable to start servlet server", throwable); + return null; + } + } + + /** + * A descriptor of a servlet. + *

After the server is started, an unspecified port (0 or negative) will be updated to reflect + * the port the system actually allocated.
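A sketch of reading the allocated port back after start; configuration and servlet are placeholders:

```java
import java.net.URI;
import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.ServletServerBuilder;
import org.eclipse.jetty.server.Server;

class PortReadbackSketch {
  static URI startAndLocate(Configuration conf, HttpServlet servlet) throws Exception {
    ServletServerBuilder builder = new ServletServerBuilder(conf);
    // registered with port 0: the system picks the port at start time
    ServletServerBuilder.Descriptor d = builder.addServlet(0, "props", servlet);
    Server server = builder.startServer(); // caller is responsible for server.stop()
    // the descriptor now reports the port that was actually bound
    return URI.create("http://localhost:" + d.getPort() + "/props/");
  }
}
```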

*/ + public static class Descriptor { + private final String path; + private final HttpServlet servlet; + private int port; + + /** + * Create a servlet descriptor. + * + * @param port the servlet port (or 0 if system allocated) + * @param path the servlet path + * @param servlet the servlet instance + */ + public Descriptor(int port, String path, HttpServlet servlet) { + this.port = port; + this.path = path; + this.servlet = servlet; + } + + @Override + public String toString() { + String name = null; + try { + name = servlet.getServletName(); + } catch (IllegalStateException ill) { + // ignore, it may happen if servlet config is not set (yet) + } + if (name == null) { + name = servlet.getClass().getSimpleName(); + } + return name + ":" + port + "/" + path; + } + + public int getPort() { + return port; + } + + void setPort(int port) { + this.port = port; + } + + public String getPath() { + return path; + } + + public HttpServlet getServlet() { + return servlet; + } + } +} + diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java new file mode 100644 index 000000000000..e752ef592e26 --- /dev/null +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java @@ -0,0 +1,233 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hive.metastore; + +import com.google.gson.Gson; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URL; +import java.net.ServerSocket; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Map; +import java.util.function.Function; +import javax.servlet.Servlet; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; +import org.eclipse.jetty.server.Server; +import org.junit.experimental.categories.Category; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import static org.apache.hadoop.hive.metastore.ServletServerBuilder.Descriptor; + +@Category(MetastoreUnitTest.class) +public class TestServletServerBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(TestServletServerBuilder.class); + + private static Function describeServlet(final Map descriptors, int port, String greeting) { + return configuration -> { + String name = greeting.toLowerCase(); + HttpServlet s1 = new HelloServlet(greeting) { + @Override + public String getServletName() { + return name + "()"; + } + }; + Descriptor descriptor = new Descriptor(port, name, s1); + descriptors.put(s1.getServletName(), descriptor); + return descriptor; + }; + } + + @Test + public void testOne() throws Exception { + Configuration conf = new Configuration(); + // keeping track of what is built + final Map descriptors = new HashMap(); + Function fd1 = describeServlet(descriptors, 0, "ONE"); + Function fd2 = describeServlet(descriptors, 0, "TWO"); + // the 'conventional' way of starting the server + Server server = ServletServerBuilder.startServer(LOG, conf, fd1, fd2); + + Descriptor d1 = descriptors.get("one()"); + Descriptor d2 = descriptors.get("two()"); + // same port for both servlets + Assert.assertTrue(d1.getPort() > 0); + Assert.assertEquals(d1.getPort(), d2.getPort()); + // check + URI uri = URI.create("http://localhost:" + d1.getPort()); + Object one = clientCall(uri.resolve("/one").toURL()); + Assert.assertEquals("ONE", one); + uri = URI.create("http://localhost:" + d2.getPort()); + Object two = clientCall(uri.resolve("/two").toURL()); + Assert.assertEquals("TWO", two); + server.stop(); + } + + @Test + public void testOnePort() throws Exception { + int port; + try (ServerSocket server0 = new ServerSocket(0)) { + port = server0.getLocalPort(); + } catch (IOException xio) { + // cant run test if can not get free port + return; + } + onePort(port); + } + + @Test + public void testOnePortAuto() throws Exception { + onePort(0); + } + + void onePort(int port) throws Exception { + Configuration conf = new Configuration(); + ServletServerBuilder ssb = new ServletServerBuilder(conf); + HttpServlet s1 = new HelloServlet("ONE"); + HttpServlet s2 = new HelloServlet("TWO"); + Descriptor d1 = ssb.addServlet(port, "one", s1); + Descriptor d2 = ssb.addServlet(port, "two", s2); + Server server = ssb.startServer(); + // same port for both servlets + Assert.assertTrue(d1.getPort() > 0); + Assert.assertEquals(d1.getPort(), d2.getPort()); + // check + URI uri = URI.create("http://localhost:" + 
d1.getPort()); + Object one = clientCall(uri.resolve("/one").toURL()); + Assert.assertEquals("ONE", one); + uri = URI.create("http://localhost:" + d2.getPort()); + Object two = clientCall(uri.resolve("/two").toURL()); + Assert.assertEquals("TWO", two); + server.stop(); + } + + @Test + public void testTwoPorts() throws Exception { + runTwoPorts(-1, -2); + } + + @Test + public void testTwoPortsAuto() throws Exception { + int p0, p1; + try (ServerSocket server0 = new ServerSocket(0); ServerSocket server1 = new ServerSocket(0)) { + p0 = server0.getLocalPort(); + p1 = server1.getLocalPort(); + } catch (IOException xio) { + // can't run the test if we cannot get two free ports + return; + } + runTwoPorts(p0, p1); + } + + void runTwoPorts(int p1, int p2) throws Exception { + Configuration conf = new Configuration(); + ServletServerBuilder ssb = new ServletServerBuilder(conf); + HttpServlet s1 = new HelloServlet("ONE"); + HttpServlet s2 = new HelloServlet("TWO"); + Descriptor d1 = ssb.addServlet(p1, "one", s1); + Descriptor d2 = ssb.addServlet(p2, "two", s2); + Map mappings = new IdentityHashMap<>(); + Server server = ssb.startServer(); + // different port for both servlets + Assert.assertNotEquals(d1.getPort(), d2.getPort()); + + URI uri = URI.create("http://localhost:" + d1.getPort()); + Object one = clientCall(uri.resolve("/one").toURL()); + Assert.assertEquals("ONE", one); + // fail, not found + Object o404 = clientCall(uri.resolve("/two").toURL()); + Assert.assertEquals(404, o404); + uri = URI.create("http://localhost:" + d2.getPort()); + Object two = clientCall(uri.resolve("/two").toURL()); + Assert.assertEquals("TWO", two); + // fail, not found + o404 = clientCall(uri.resolve("/one").toURL()); + Assert.assertEquals(404, o404); + server.stop(); + } + + static int findFreePort() throws IOException { + try (ServerSocket server0 = new ServerSocket(0)) { + return server0.getLocalPort(); + } + } + + static int[] find2FreePort() throws IOException { + try (ServerSocket socket0 = new ServerSocket(0); ServerSocket socket1 = new ServerSocket(0)) { + // open both sockets at once so the two returned ports are distinct + return new int[]{socket0.getLocalPort(), socket1.getLocalPort()}; + } + } + + /** + * Performs a Json client call.
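The helper below returns the Gson-parsed body on HTTP 200 and the raw status code otherwise, which keeps the 404 assertions in runTwoPorts above compact. A usage sketch, assuming a caller in the same package:

```java
import java.io.IOException;
import java.net.URL;

class ClientCallSketch {
  // the port value is expected to come from a started server's descriptor
  static boolean isServed(int port, String path) throws IOException {
    Object result = TestServletServerBuilder.clientCall(
        new URL("http://localhost:" + port + "/" + path + "/"));
    // non-OK responses come back as the Integer status code, not an exception
    return !(result instanceof Integer);
  }
}
```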
+ * + * @param url the url + * @return the result the was returned through Json + * @throws IOException if marshalling the request/response fail + */ + static Object clientCall(URL url) throws IOException { + HttpURLConnection con = (HttpURLConnection) url.openConnection(); + con.setRequestMethod("GET"); + con.setRequestProperty("Content-Type", "application/json"); + con.setRequestProperty("Accept", "application/json"); + con.setDoOutput(true); + int responseCode = con.getResponseCode(); + if (responseCode == HttpServletResponse.SC_OK) { + try (Reader reader = new BufferedReader( + new InputStreamReader(con.getInputStream(), StandardCharsets.UTF_8))) { + return new Gson().fromJson(reader, Object.class); + } + } + return responseCode; + } + +} + +class HelloServlet extends HttpServlet { + + final String greeting; + + public HelloServlet() { + this("Hello"); + } + + public HelloServlet(String greeting) { + this.greeting = greeting; + } + + @Override + protected void doGet(HttpServletRequest request, + HttpServletResponse response) throws ServletException, IOException { + response.setContentType("application/json"); + response.setStatus(HttpServletResponse.SC_OK); + response.getWriter().println(greeting); + } +} diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java index 7c3c77451649..3e7c0cd2e600 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java @@ -17,32 +17,33 @@ */ package org.apache.hadoop.hive.metastore.properties; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.TreeMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.ObjectStore; import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder; -import org.junit.Assert; -import org.junit.Test; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.TreeMap; - import static org.apache.hadoop.hive.metastore.properties.PropertyType.DATETIME; import static org.apache.hadoop.hive.metastore.properties.PropertyType.DOUBLE; import static org.apache.hadoop.hive.metastore.properties.PropertyType.INTEGER; import static org.apache.hadoop.hive.metastore.properties.PropertyType.STRING; +import org.junit.Assert; +import org.junit.Test; +import org.junit.experimental.categories.Category; /** * In-process property manager test. 
*/ +@Category(MetastoreUnitTest.class) public class HMSDirectTest extends HMSTestBase { protected ObjectStore objectStore = null; static Random RND = new Random(20230424); diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java index 75f670409225..f196d66c33b3 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java @@ -18,8 +18,25 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import java.io.BufferedReader; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URL; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.PropertyServlet; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; @@ -34,56 +51,52 @@ import org.apache.http.message.BasicNameValuePair; import org.eclipse.jetty.server.Server; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.junit.experimental.categories.Category; -import javax.servlet.http.HttpServletResponse; -import java.io.BufferedReader; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URL; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - +@Category(MetastoreUnitTest.class) public class HMSServletTest extends HMSTestBase { - protected static final String CLI = "hmscli"; + String path = null; Server servletServer = null; - int sport = -1; - + int servletPort = -1; + + @Before + public void setUp() throws Exception { + super.setUp(); + path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH); + } - @Override protected int createServer(Configuration conf) throws Exception { + @Override + protected int createServer(Configuration conf) throws Exception { if (servletServer == null) { servletServer = PropertyServlet.startServer(conf); if (servletServer == null || !servletServer.isStarted()) { Assert.fail("http server did not start"); } - sport = servletServer.getURI().getPort(); + servletPort = servletServer.getURI().getPort(); } - return sport; + return servletPort; } /** * Stops the server. 
* @param port the server port */ - @Override protected void stopServer(int port) throws Exception { + @Override + protected void stopServer(int port) throws Exception { if (servletServer != null) { servletServer.stop(); servletServer = null; - sport = -1; + servletPort = -1; } } + @Override protected PropertyClient createClient(Configuration conf, int sport) throws Exception { - URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS); + String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH); + URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS); String jwt = generateJWT(); return new JSonClient(jwt, url); } @@ -143,7 +156,7 @@ public Map getProperties(List selection) { @Test public void testServletEchoA() throws Exception { - URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS); + URL url = new URL("http://hive@localhost:" + servletPort + "/" + path + "/" + NS); Map json = Collections.singletonMap("method", "echo"); String jwt = generateJWT(); // succeed @@ -175,8 +188,8 @@ public void testProperties0() throws Exception { .setScheme("http") .setUserInfo("hive") .setHost("localhost") - .setPort(sport) - .setPath("/" + CLI + "/" + NS) + .setPort(servletPort) + .setPath("/" + path + "/" + NS) .setParameters(nvp) .build(); HttpGet get = new HttpGet(uri); @@ -292,7 +305,7 @@ public static Object clientCall(String jwt, URL url, String method, Object arg) * @throws Exception */ private HttpPost createPost(String jwt, String msgBody) { - HttpPost method = new HttpPost("http://hive@localhost:" + sport + "/" + CLI + "/" + NS); + HttpPost method = new HttpPost("http://hive@localhost:" + servletPort + "/" + path + "/" + NS); method.addHeader("Authorization", "Bearer " + jwt); method.addHeader("Content-Type", "application/json"); method.addHeader("Accept", "application/json"); diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java index db15d52e12d4..b1c8b803dff9 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java @@ -18,7 +18,19 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.URL; +import java.nio.charset.Charset; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; @@ -29,30 +41,23 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.HttpClients; +import org.junit.experimental.categories.Category; -import javax.servlet.http.HttpServletResponse; -import java.io.Closeable; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.net.URL; -import java.nio.charset.Charset; -import 
java.util.List; -import java.util.Map; -import java.util.TreeMap; - +@Category(MetastoreUnitTest.class) public class HMSServletTest1 extends HMSServletTest { @Override public void tearDown() throws Exception { if (client instanceof AutoCloseable) { ((AutoCloseable) client).close(); + client = null; } super.tearDown(); } @Override protected PropertyClient createClient(Configuration conf, int sport) throws Exception { - URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS); + String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH); + URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS); String jwt = generateJWT(); return new JSonHttpClient(jwt, url.toString()); } diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java index 5ff45d90dd82..fd58d53e1f19 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java @@ -16,22 +16,23 @@ */ package org.apache.hadoop.hive.metastore.properties; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import java.nio.file.Files; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; import org.junit.Assert; - -import java.nio.file.Files; - -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import org.junit.experimental.categories.Category; /** * Test using the servlet server created by the MetaStore and * the client based on Apache HttpClient. 
*/ +@Category(MetastoreUnitTest.class) public class HMSServletTest1A extends HMSServletTest1 { protected int thriftPort; @@ -45,12 +46,12 @@ protected int createServer(Configuration conf) throws Exception { .willReturn(ok() .withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath())))); thriftPort = MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf); - servletServer = HiveMetaStore.getPropertyServer(); + servletServer = HiveMetaStore.getServletServer(); if (servletServer == null || !servletServer.isStarted()) { Assert.fail("http server did not start"); } - sport = servletServer.getURI().getPort(); - return sport; + servletPort = HiveMetaStore.getPropertyServletPort(); + return servletPort; } @Override diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java index 10a54457ab12..3a8fb16028f0 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java @@ -16,20 +16,22 @@ */ package org.apache.hadoop.hive.metastore.properties; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import java.nio.file.Files; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; import org.junit.Assert; -import java.nio.file.Files; - -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import org.junit.experimental.categories.Category; /** * Test using the servlet server created by the MetaStore. 
*/ +@Category(MetastoreUnitTest.class) public class HMSServletTestA extends HMSServletTest { protected int thriftPort; @@ -43,12 +45,12 @@ protected int createServer(Configuration conf) throws Exception { .willReturn(ok() .withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath())))); thriftPort = MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf); - servletServer = HiveMetaStore.getPropertyServer(); + servletServer = HiveMetaStore.getServletServer(); if (servletServer == null || !servletServer.isStarted()) { Assert.fail("http server did not start"); } - sport = servletServer.getURI().getPort(); - return sport; + servletPort = HiveMetaStore.getPropertyServletPort(); + return servletPort; } @Override diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java index 30635e8bbeee..cded98bd07cc 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.metastore.properties; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; import com.github.tomakehurst.wiremock.junit.WireMockRule; import com.nimbusds.jose.JWSAlgorithm; import com.nimbusds.jose.JWSHeader; @@ -25,25 +27,6 @@ import com.nimbusds.jose.jwk.RSAKey; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.SignedJWT; -import org.apache.commons.io.IOUtils; -import org.apache.commons.jexl3.JxltEngine; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; -import org.apache.hadoop.hive.metastore.ObjectStore; -import org.apache.hadoop.hive.metastore.TestObjectStore; - -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.ok; -import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpStatus; -import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpType; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.IOException; import java.io.StringWriter; @@ -58,16 +41,29 @@ import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.TimeUnit; - -import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.JEXL; +import org.apache.commons.io.IOUtils; +import org.apache.commons.jexl3.JxltEngine; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.ObjectStore; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MAINTENANCE_OPERATION; import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MAINTENANCE_STATUS; +import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpStatus; +import 
org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpType; +import static org.apache.hadoop.hive.metastore.properties.PropertyManager.JEXL; import static org.apache.hadoop.hive.metastore.properties.PropertyType.BOOLEAN; import static org.apache.hadoop.hive.metastore.properties.PropertyType.DATETIME; import static org.apache.hadoop.hive.metastore.properties.PropertyType.DOUBLE; import static org.apache.hadoop.hive.metastore.properties.PropertyType.INTEGER; import static org.apache.hadoop.hive.metastore.properties.PropertyType.JSON; import static org.apache.hadoop.hive.metastore.properties.PropertyType.STRING; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.ClassRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class HMSTestBase { protected static final String baseDir = System.getProperty("basedir"); @@ -87,12 +83,12 @@ public abstract class HMSTestBase { /** * Abstract the property client access on a given namespace. */ - interface PropertyClient { + protected interface PropertyClient { boolean setProperties(Map properties); Map> getProperties(String mapPrefix, String mapPredicate, String... selection) throws IOException; } - interface HttpPropertyClient extends PropertyClient { + protected interface HttpPropertyClient extends PropertyClient { default Map getProperties(List selection) throws IOException { throw new UnsupportedOperationException("not implemented in " + this.getClass()); } @@ -100,7 +96,7 @@ default Map getProperties(List selection) throws IOExcep protected Configuration conf = null; - protected static final Logger LOG = LoggerFactory.getLogger(TestObjectStore.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(HMSTestBase.class); static Random RND = new Random(20230424); protected String NS;// = "hms" + RND.nextInt(100); protected PropertyClient client; @@ -111,7 +107,6 @@ public void setUp() throws Exception { NS = "hms" + RND.nextInt(100); conf = MetastoreConf.newMetastoreConf(); MetaStoreTestUtils.setConfForStandloneMode(conf); - MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true); // Events that get cleaned happen in batches of 1 to exercise batching code MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.EVENT_CLEAN_MAX_EVENTS, 1L); @@ -184,9 +179,9 @@ private static String generateJWT(String user, Path keyFile, long lifeTimeMillis /** * Creates and starts the server. - * @param conf + * @param conf the configuration * @return the server port - * @throws Exception + * @throws Exception if creation fails */ protected int createServer(Configuration conf) throws Exception { return 0; @@ -195,7 +190,7 @@ protected int createServer(Configuration conf) throws Exception { /** * Stops the server. * @param port the server port - * @throws Exception + * @throws Exception if stopping the server fails */ protected void stopServer(int port) throws Exception { // nothing @@ -203,13 +198,15 @@ protected void stopServer(int port) throws Exception { /** * Creates a client. 
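The generateJWT helper referenced here mints the bearer tokens the servlet tests send. Its body is not part of this hunk; a plausible sketch of such a signing flow with nimbus-jose-jwt, assuming the key file holds an RSA JWK in JSON form:

```java
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.JWSHeader;
import com.nimbusds.jose.crypto.RSASSASigner;
import com.nimbusds.jose.jwk.RSAKey;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.SignedJWT;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Date;

class JwtSigningSketch {
  static String sign(String user, Path keyFile, long lifeTimeMillis) throws Exception {
    // parse the RSA JWK from the test key file (assumed JSON JWK format)
    RSAKey rsaKey = RSAKey.parse(new String(Files.readAllBytes(keyFile), StandardCharsets.UTF_8));
    JWTClaimsSet claims = new JWTClaimsSet.Builder()
        .subject(user)
        .issueTime(new Date())
        .expirationTime(new Date(System.currentTimeMillis() + lifeTimeMillis))
        .build();
    SignedJWT jwt = new SignedJWT(
        new JWSHeader.Builder(JWSAlgorithm.RS256).keyID(rsaKey.getKeyID()).build(), claims);
    jwt.sign(new RSASSASigner(rsaKey));
    return jwt.serialize();
  }
}
```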
+ * @param conf the configuration + * @param port the servlet port * @return the client instance - * @throws Exception + * @throws Exception if client creation fails */ protected abstract PropertyClient createClient(Configuration conf, int port) throws Exception; - public void runOtherProperties0(PropertyClient client) throws Exception { + void runOtherProperties0(PropertyClient client) throws Exception { Map ptyMap = createProperties0(); boolean commit = client.setProperties(ptyMap); Assert.assertTrue(commit); @@ -236,7 +233,7 @@ static Map createProperties0() { try { String json = IOUtils.toString( HMSDirectTest.class.getResourceAsStream("payload.json"), - "UTF-8" + StandardCharsets.UTF_8 ); JxltEngine JXLT = JEXL.createJxltEngine(); JxltEngine.Template jsonjexl = JXLT.createTemplate(json, "table", "delta", "g"); @@ -265,7 +262,7 @@ static Map createProperties0() { } } - public void runOtherProperties1(PropertyClient client) throws Exception { + void runOtherProperties1(PropertyClient client) throws Exception { Map ptyMap = createProperties1(); boolean commit = client.setProperties(ptyMap); Assert.assertTrue(commit); @@ -278,12 +275,11 @@ public void runOtherProperties1(PropertyClient client) throws Exception { HttpPropertyClient httpClient = (HttpPropertyClient) client; // get fillfactors using getProperties, create args array from previous result List keys = new ArrayList<>(maps.keySet()); - for (int k = 0; k < keys.size(); ++k) { - keys.set(k, keys.get(k) + ".fillFactor"); - } + keys.replaceAll(s -> s + ".fillFactor"); Object values = httpClient.getProperties(keys); Assert.assertTrue(values instanceof Map); - Map getm = (Map) values; + @SuppressWarnings("unchecked") + final Map getm = (Map) values; for (Map.Entry> entry : maps.entrySet()) { Map map0v = entry.getValue(); Assert.assertEquals(map0v.get("fillFactor"), getm.get(entry.getKey() + ".fillFactor")); diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java index 33354ad17b54..b7fa65d6d771 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java @@ -17,17 +17,19 @@ */ package org.apache.hadoop.hive.metastore.properties; +import java.io.IOException; +import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; import org.apache.thrift.TException; import org.junit.Test; +import org.junit.experimental.categories.Category; -import java.io.IOException; -import java.util.Map; - +@Category(MetastoreUnitTest.class) public class HMSThriftTest extends HMSTestBase { /** * A Thrift based property client. @@ -67,16 +69,11 @@ public Map> getProperties(String mapPrefix, String m MetaStoreTestUtils.close(port); } - /** - * Creates a client. 
- * @return the client instance - * @throws Exception - */ @Override protected PropertyClient createClient(Configuration conf, int port) throws Exception { MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, "http://localhost:" + port); MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.EXECUTE_SET_UGI, false); - HiveMetaStoreClient client = new HiveMetaStoreClient(conf); - return new ThriftPropertyClient(NS, client); + HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf); + return new ThriftPropertyClient(NS, hiveClient); } @Test diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java index 50ab770aaabf..1ef1c2119194 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java @@ -18,12 +18,14 @@ package org.apache.hadoop.hive.metastore.properties; import com.google.common.base.Supplier; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; import org.apache.hadoop.hive.metastore.ObjectStore; import org.apache.hadoop.hive.metastore.TestObjectStore; import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.model.MMetastoreDBProperties; @@ -31,11 +33,11 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.junit.experimental.categories.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.nio.charset.StandardCharsets; - +@Category(MetastoreUnitTest.class) public class PropertyStoreTest { private ObjectStore objectStore = null; private Configuration conf; diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml index 6b5cb3e0e542..ae8331642b7e 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml @@ -29,6 +29,7 @@ metastore-common metastore-server metastore-tools + metastore-rest-catalog 4.1.0-SNAPSHOT @@ -120,6 +121,7 @@ ${basedir}/src/gen/thrift -I ${thrift.home} -strict --gen java:beans,generated_annotations=undated --gen cpp --gen php --gen py --gen rb + 2.32.0 2024-01-01T00:00:00Z