diff --git a/authorizations/authorization-chain/build.gradle.kts b/authorizations/authorization-chain/build.gradle.kts new file mode 100644 index 00000000000..f70c45bf3cd --- /dev/null +++ b/authorizations/authorization-chain/build.gradle.kts @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +description = "authorization-chain" + +plugins { + `maven-publish` + id("java") + id("idea") +} + +val scalaVersion: String = project.properties["scalaVersion"] as? 
String ?: extra["defaultScalaVersion"].toString() +val sparkVersion: String = libs.versions.spark35.get() +val kyuubiVersion: String = libs.versions.kyuubi4paimon.get() +val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".") + +dependencies { + implementation(project(":api")) { + exclude(group = "*") + } + implementation(project(":core")) { + exclude(group = "*") + } + implementation(project(":common")) { + exclude(group = "*") + } + + implementation(libs.bundles.log4j) + implementation(libs.commons.lang3) + implementation(libs.guava) + implementation(libs.javax.jaxb.api) { + exclude("*") + } + implementation(libs.javax.ws.rs.api) + implementation(libs.jettison) + compileOnly(libs.lombok) + implementation(libs.rome) + + testImplementation(project(":core", "testArtifacts")) + testImplementation(project(":clients:client-java")) + testImplementation(project(":server")) + testImplementation(project(":catalogs:catalog-common")) + testImplementation(project(":integration-test-common", "testArtifacts")) + testImplementation(project(":authorizations:authorization-ranger")) + testImplementation(project(":authorizations:authorization-ranger", "testArtifacts")) + testImplementation(libs.junit.jupiter.api) + testImplementation(libs.mockito.core) + testImplementation(libs.testcontainers) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.mysql.driver) + testImplementation(libs.postgresql.driver) + testImplementation(libs.ranger.intg) { + exclude("org.apache.hadoop", "hadoop-common") + exclude("org.apache.hive", "hive-storage-api") + exclude("org.apache.lucene") + exclude("org.apache.solr") + exclude("org.apache.kafka") + exclude("org.elasticsearch") + exclude("org.elasticsearch.client") + exclude("org.elasticsearch.plugin") + exclude("org.apache.ranger", "ranger-plugins-audit") + exclude("org.apache.ranger", "ranger-plugins-cred") + exclude("org.apache.ranger", "ranger-plugin-classloader") + exclude("net.java.dev.jna") + exclude("javax.ws.rs") 
+ exclude("org.eclipse.jetty") + } + testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") + testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion") { + exclude("org.apache.avro") + exclude("org.apache.hadoop") + exclude("org.apache.zookeeper") + exclude("io.dropwizard.metrics") + exclude("org.rocksdb") + } + testImplementation("org.apache.kyuubi:kyuubi-spark-authz-shaded_$scalaVersion:$kyuubiVersion") { + exclude("com.sun.jersey") + } + testImplementation(libs.hadoop3.client) + testImplementation(libs.hadoop3.common) { + exclude("com.sun.jersey") + exclude("javax.servlet", "servlet-api") + } + testImplementation(libs.hadoop3.hdfs) { + exclude("com.sun.jersey") + exclude("javax.servlet", "servlet-api") + exclude("io.netty") + } +} + +tasks { + val runtimeJars by registering(Copy::class) { + from(configurations.runtimeClasspath) + into("build/libs") + } + + val copyAuthorizationLibs by registering(Copy::class) { + dependsOn("jar", runtimeJars) + from("build/libs") { + exclude("guava-*.jar") + exclude("log4j-*.jar") + exclude("slf4j-*.jar") + } + into("$rootDir/distribution/package/authorizations/chain/libs") + } + + register("copyLibAndConfig", Copy::class) { + dependsOn(copyAuthorizationLibs) + } + + jar { + dependsOn(runtimeJars) + } +} + +tasks.test { + dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars", ":authorizations:authorization-ranger:jar", ":authorizations:authorization-ranger:runtimeJars") + + val skipITs = project.hasProperty("skipITs") + if (skipITs) { + // Exclude integration tests + exclude("**/integration/test/**") + } else { + dependsOn(tasks.jar) + } +} diff --git a/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java new file mode 100644 index 00000000000..7e5d37dd11a --- /dev/null +++ 
b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.chain; + +import java.util.Map; +import org.apache.gravitino.connector.authorization.AuthorizationPlugin; +import org.apache.gravitino.connector.authorization.BaseAuthorization; + +/** Implementation of a Chain authorization in Gravitino. 
*/ +public class ChainAuthorization extends BaseAuthorization<ChainAuthorization> { + @Override + public String shortName() { + return "chain"; + } + + @Override + protected AuthorizationPlugin newPlugin( + String metalake, String catalogProvider, Map<String, String> config) { + switch (catalogProvider) { + case "hive": + case "test": // For testing purposes + return ChainAuthorizationPlugin.getInstance(metalake, catalogProvider, config); + default: + throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider); + } + } +} diff --git a/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationBase.java b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationBase.java new file mode 100644 index 00000000000..bfbf130ebf0 --- /dev/null +++ b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationBase.java @@ -0,0 +1,310 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.chain; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.authorization.Group; +import org.apache.gravitino.authorization.MetadataObjectChange; +import org.apache.gravitino.authorization.Owner; +import org.apache.gravitino.authorization.Role; +import org.apache.gravitino.authorization.RoleChange; +import org.apache.gravitino.authorization.User; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.connector.WildcardPropertiesMeta; +import org.apache.gravitino.connector.authorization.AuthorizationPlugin; +import org.apache.gravitino.connector.authorization.BaseAuthorization; +import org.apache.gravitino.exceptions.AuthorizationPluginException; +import org.apache.gravitino.utils.MapUtils; + +/** Chain call authorization plugin. 
*/ +public abstract class ChainAuthorizationBase implements AuthorizationPlugin { + private List plugins = Lists.newArrayList(); + private final String metalake; + + ChainAuthorizationBase(String metalake, String catalogProvider, Map config) { + this.metalake = metalake; + initPlugins(config); + } + + private void initPlugins(Map config) { + String chainPlugins = + config.get(AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey()); + Map chainConfig = + MapUtils.getFilteredMap( + config, + key -> + key.toString() + .startsWith( + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey())); + Arrays.stream( + chainPlugins.split(WildcardPropertiesMeta.Constants.WILDCARD_CONFIG_VALUES_SPLITTER)) + .forEach( + pluginName -> { + // Get catalog provider for each plugin + String catalogProviderKey = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getChainCatalogProviderKey()); + Preconditions.checkArgument( + config.containsKey(catalogProviderKey), + "Missing catalog provider for plugin: " + pluginName); + String catalogProvider = config.get(catalogProviderKey); + Preconditions.checkArgument( + !catalogProvider.isEmpty(), + "Catalog provider for plugin: " + pluginName + " is empty"); + // Get authorization provider for each plugin + String providerKey = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getChainProviderKey()); + Preconditions.checkArgument( + config.containsKey(providerKey), "Missing provider for plugin: " + pluginName); + String authProvider = config.get(providerKey); + Preconditions.checkArgument( + !authProvider.isEmpty(), "Provider for plugin: " + pluginName + " is empty"); + // Convert chain config to plugin config + Map pluginConfig = + chainConfig.entrySet().stream() + .filter( + entry -> + entry + .getKey() + .startsWith( + String.format( + "%s.%s", + AuthorizationPropertiesMeta.getInstance() + 
.wildcardNodePropertyKey(), + pluginName))) + .collect( + Collectors.toMap( + entry -> + AuthorizationPropertiesMeta.chainKeyToPluginKey( + entry.getKey(), pluginName), + Map.Entry::getValue)); + AuthorizationPlugin authorizationPlugin = + BaseAuthorization.createAuthorization( + this.getClass().getClassLoader(), authProvider) + .plugin(metalake, catalogProvider, pluginConfig); + plugins.add(authorizationPlugin); + }); + } + + @VisibleForTesting + public final List getPlugins() { + return plugins; + } + + @Override + public void close() throws IOException { + for (AuthorizationPlugin plugin : plugins) { + plugin.close(); + } + } + + @Override + public Boolean onMetadataUpdated(MetadataObjectChange... changes) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onMetadataUpdated(changes); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleCreated(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleCreated(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleAcquired(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleAcquired(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleDeleted(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleDeleted(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleUpdated(Role role, RoleChange... 
changes) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleUpdated(role, changes); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGrantedRolesToUser(List roles, User user) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGrantedRolesToUser(roles, user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRevokedRolesFromUser(List roles, User user) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRevokedRolesFromUser(roles, user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGrantedRolesToGroup(List roles, Group group) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGrantedRolesToGroup(roles, group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRevokedRolesFromGroup(List roles, Group group) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRevokedRolesFromGroup(roles, group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserAdded(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onUserAdded(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserRemoved(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = 
plugin.onUserRemoved(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserAcquired(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onUserAcquired(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupAdded(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupAdded(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupRemoved(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupRemoved(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupAcquired(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupAcquired(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner newOwner) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onOwnerSet(metadataObject, preOwner, newOwner); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } +} diff --git a/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java new file mode 100644 index 00000000000..338ee01b924 --- /dev/null +++ 
b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.chain; + +import java.util.Map; + +/** Chain authorization operations plugin class.
*/ +public class ChainAuthorizationPlugin extends ChainAuthorizationBase { + private static volatile ChainAuthorizationPlugin instance = null; + + public static ChainAuthorizationPlugin getInstance( + String metalake, String catalogProvider, Map<String, String> config) { + if (instance == null) { // NOTE(review): process-wide singleton — a later call with a different metalake/config still returns the first instance; confirm one chain plugin per JVM is intended + synchronized (ChainAuthorizationPlugin.class) { + if (instance == null) { + instance = new ChainAuthorizationPlugin(metalake, catalogProvider, config); + } + } + } + return instance; + } + + private ChainAuthorizationPlugin( + String metalake, String catalogProvider, Map<String, String> config) { + super(metalake, catalogProvider, config); + } +} diff --git a/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider b/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider new file mode 100644 index 00000000000..c4b35cb24df --- /dev/null +++ b/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +org.apache.gravitino.authorization.chain.ChainAuthorization \ No newline at end of file diff --git a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestAuthorizationPropertiesMeta.java b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestAuthorizationPropertiesMeta.java new file mode 100644 index 00000000000..7a33b5b7f5e --- /dev/null +++ b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestAuthorizationPropertiesMeta.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.chain; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.connector.WildcardPropertiesMeta; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestAuthorizationPropertiesMeta { + private static final Logger LOG = LoggerFactory.getLogger(TestAuthorizationPropertiesMeta.class); + + @Test + public void checkChainPropertyDefines() throws IllegalAccessException { + Map<String, String> mapVariable = + getPublicStaticVariableFromClass(AuthorizationPropertiesMeta.class); + List<String> ignoreChecks = + Arrays.asList( + AuthorizationPropertiesMeta.CHAIN_CATALOG_PROVIDER, + AuthorizationPropertiesMeta.CHAIN_PROVIDER, + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey()); + mapVariable.values() + .forEach( + value -> { + if (ignoreChecks.stream().noneMatch(value::equals) + && value.contains( + AuthorizationPropertiesMeta.getInstance().secondNodePropertyKey())) { + String pluginPropValue = + value.replace( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(WildcardPropertiesMeta.Constants.WILDCARD, ""), + AuthorizationPropertiesMeta.getInstance().generateFirstNodePropertyKey("")); + LOG.info("Checking variable: {}, pluginPropValue: {}", value, pluginPropValue); + Assertions.assertTrue( + mapVariable.values().contains(pluginPropValue), + String.format("Variable %s is not defined in the class", value)); + } + }); + } + + /** + * Get all public static member variables from a class + * + * @param clazz The class to get public member variables from + * @return A map from variable name to its string value + */ + private Map<String, String> getPublicStaticVariableFromClass(Class<?> clazz) + throws IllegalAccessException { + Field[] 
fields = clazz.getFields(); + Map<String, String> publicStaticFields = new HashMap<>(); + + for (Field field : fields) { + if (Modifier.isPublic(field.getModifiers()) + && Modifier.isStatic(field.getModifiers()) + && field.getDeclaringClass().equals(clazz) + && field.getType().equals(String.class)) { + publicStaticFields.put(field.getName(), field.get(null).toString()); + } + } + return publicStaticFields; + } +} diff --git a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorization.java b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorization.java new file mode 100644 index 00000000000..52a8324f470 --- /dev/null +++ b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorization.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.chain; + +import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.Namespace; +import org.apache.gravitino.TestCatalog; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.connector.authorization.AuthorizationPlugin; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFS; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFSPlugin; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQL; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQLPlugin; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.CatalogEntity; +import org.apache.gravitino.utils.IsolatedClassLoader; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestChainAuthorization { + private static TestCatalog hiveCatalog; + + @BeforeAll + public static void setUp() { + AuditInfo auditInfo = + AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build(); + + CatalogEntity hiveCatalogEntity = + CatalogEntity.builder() + .withId(1L) + .withName("catalog-test1") + .withNamespace(Namespace.of("default")) + .withType(Catalog.Type.RELATIONAL) + .withProvider("test") + .withAuditInfo(auditInfo) + .build(); + + Map catalogConf = new HashMap<>(); + catalogConf.put(Catalog.AUTHORIZATION_PROVIDER, "chain"); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey(), "hive1,hdfs1"); + catalogConf.put("authorization.chain.hive1.catalog-provider", "hive"); + catalogConf.put( + "authorization.chain.hive1.provider", TestRangerAuthorizationHadoopSQL.SHORT_NAME); + 
catalogConf.put("authorization.chain.hive1.ranger.auth.types", "simple"); + catalogConf.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + catalogConf.put("authorization.chain.hive1.ranger.username", "admin"); + catalogConf.put("authorization.chain.hive1.ranger.password", "admin"); + catalogConf.put("authorization.chain.hive1.ranger.service.name", "hiveDev1"); + catalogConf.put("authorization.chain.hdfs1.catalog-provider", "hdfs"); + catalogConf.put("authorization.chain.hdfs1.provider", TestRangerAuthorizationHDFS.SHORT_NAME); + + hiveCatalog = + new TestCatalog().withCatalogConf(catalogConf).withCatalogEntity(hiveCatalogEntity); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader( + Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); + hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader); + } + + @Test + public void testChainAuthorization() { + AuthorizationPlugin authPlugin = hiveCatalog.getAuthorizationPlugin(); + Assertions.assertInstanceOf(ChainAuthorizationPlugin.class, authPlugin); + ChainAuthorizationPlugin chainAuthPlugin = (ChainAuthorizationPlugin) authPlugin; + Assertions.assertEquals(2, chainAuthPlugin.getPlugins().size()); + + chainAuthPlugin + .getPlugins() + .forEach( + plugin -> { + if (plugin instanceof TestRangerAuthorizationHadoopSQLPlugin) { + Assertions.assertFalse( + ((TestRangerAuthorizationHadoopSQLPlugin) plugin).callOnCreateRole1); + } else if (plugin instanceof TestRangerAuthorizationHDFSPlugin) { + Assertions.assertFalse( + ((TestRangerAuthorizationHDFSPlugin) plugin).callOnCreateRole2); + } + }); + + chainAuthPlugin.onRoleCreated(null); + + chainAuthPlugin + .getPlugins() + .forEach( + plugin -> { + if (plugin instanceof TestRangerAuthorizationHadoopSQLPlugin) { + Assertions.assertTrue( + ((TestRangerAuthorizationHadoopSQLPlugin) plugin).callOnCreateRole1); + } else if (plugin instanceof TestRangerAuthorizationHDFSPlugin) { + Assertions.assertTrue( + 
((TestRangerAuthorizationHDFSPlugin) plugin).callOnCreateRole2); + } + }); + } +} diff --git a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorizationPropertiesMeta.java b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorizationPropertiesMeta.java new file mode 100644 index 00000000000..13dff4aa9de --- /dev/null +++ b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/TestChainAuthorizationPropertiesMeta.java @@ -0,0 +1,297 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.chain; + +import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; +import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; + +import com.google.common.collect.Maps; +import java.util.HashMap; +import java.util.Map; +import org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv; +import org.apache.gravitino.catalog.PropertiesMetadataHelpers; +import org.apache.gravitino.catalog.hive.HiveConstants; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.connector.PropertiesMetadata; +import org.apache.gravitino.integration.test.container.RangerContainer; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class TestChainAuthorizationPropertiesMeta { + private static final AuthorizationPropertiesMeta authPropertiesMetaInstance = + AuthorizationPropertiesMeta.getInstance(); + private static final String CHAIN_PLUGIN_SHORT_NAME = authPropertiesMetaInstance.secondNodeName(); + + @Test + void testChainHiveCatalog() { + String pluginName = "hive1"; + Map properties = new HashMap<>(); + properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083"); + properties.put("gravitino.bypass.hive.metastore.client.capability.check", "true"); + properties.put(IMPERSONATION_ENABLE, "true"); + properties.put(AUTHORIZATION_PROVIDER, CHAIN_PLUGIN_SHORT_NAME); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getChainCatalogProviderKey()), + "hive"); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), pluginName); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getChainProviderKey()), + CHAIN_PLUGIN_SHORT_NAME); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getRangerAuthTypeKey()), + RangerContainer.authType); 
+ properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getRangerAdminUrlKey()), + "http://localhost:" + RangerContainer.RANGER_SERVER_PORT); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getRangerUsernameKey()), + RangerContainer.rangerUserName); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getRangerPasswordKey()), + RangerContainer.rangerPassword); + properties.put( + authPropertiesMetaInstance.getPropertyValue( + pluginName, AuthorizationPropertiesMeta.getRangerServiceNameKey()), + RangerITEnv.RANGER_HIVE_REPO_NAME); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertDoesNotThrow( + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void test1() { + Map properties = Maps.newHashMap(); + properties.put(HiveConstants.METASTORE_URIS, "HIVE_METASTORE_URIS"); + properties.put(IMPERSONATION_ENABLE, "true"); + + properties.put(AUTHORIZATION_PROVIDER, CHAIN_PLUGIN_SHORT_NAME); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1,hdfs1"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", 
"simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + PropertiesMetadataHelpers.validatePropertyForCreate(authorizationPropertiesMeta, properties); + } + + @Test + void testWildcardPropertyChainPluginsOne() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertDoesNotThrow( + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginsTwo() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1,hdfs1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + 
properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertDoesNotThrow( + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginsHasSpace() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1, hdfs1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + 
properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertDoesNotThrow( + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginsOneButHasTowPluginConfig() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginsHasPoint() { + Map properties = Maps.newHashMap(); + 
properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "plug.1, hdfs1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.plug.1.ranger.auth.type", "simple"); + properties.put("authorization.chain.plug.1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.plug.1.ranger.username", "admin"); + properties.put("authorization.chain.plug.1.ranger.password", "admin"); + properties.put("authorization.chain.plug.1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginErrorPluginName() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1,hdfs1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + 
properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.plug3.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginDuplicationPluginName() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1,hive1,hdfs1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + properties.put("authorization.chain.hdfs1.provider", "ranger"); + properties.put("authorization.chain.hdfs1.catalog-provider", "hadoop"); + properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hdfs1.ranger.admin.url", "http://localhost:6080"); + 
properties.put("authorization.chain.hdfs1.ranger.username", "admin"); + properties.put("authorization.chain.hdfs1.ranger.password", "admin"); + properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } + + @Test + void testWildcardPropertyChainPluginErrorPropertyKey() { + Map properties = Maps.newHashMap(); + properties.put(authPropertiesMetaInstance.wildcardNodePropertyKey(), "hive1"); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.catalog-provider", "hive"); + properties.put("authorization.chain.hive1.ranger-error.auth.types", "simple"); + PropertiesMetadata authorizationPropertiesMeta = new AuthorizationPropertiesMeta(); + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + PropertiesMetadataHelpers.validatePropertyForCreate( + authorizationPropertiesMeta, properties)); + } +} diff --git a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java new file mode 100644 index 00000000000..d401dabc14d --- /dev/null +++ b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.chain.integration.test; + +import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; +import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.getRangerAdminUrlKey; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.getRangerAuthTypeKey; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.getRangerPasswordKey; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.getRangerServiceNameKey; +import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.getRangerUsernameKey; + +import java.util.HashMap; +import java.util.Map; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.authorization.ranger.integration.test.RangerHiveE2EIT; +import org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv; +import org.apache.gravitino.catalog.hive.HiveConstants; +import org.apache.gravitino.connector.AuthorizationPropertiesMeta; +import org.apache.gravitino.integration.test.container.RangerContainer; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestChainAuthorizationIT extends RangerHiveE2EIT { + private static final Logger LOG = 
LoggerFactory.getLogger(TestChainAuthorizationIT.class); + private static final String CHAIN_PLUGIN_SHORT_NAME = "chain"; + + @BeforeAll + public void startIntegrationTest() throws Exception { + super.startIntegrationTest(); + } + + @Override + public void createCatalog() { + String pluginName = "hive1"; + Map catalogConf = new HashMap<>(); + catalogConf.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS); + catalogConf.put(IMPERSONATION_ENABLE, "true"); + catalogConf.put(AUTHORIZATION_PROVIDER, CHAIN_PLUGIN_SHORT_NAME); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey(), pluginName); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, AuthorizationPropertiesMeta.getChainProviderKey()), + CHAIN_PLUGIN_SHORT_NAME); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, getRangerAuthTypeKey()), + RangerContainer.authType); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, getRangerAdminUrlKey()), + RANGER_ADMIN_URL); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, getRangerUsernameKey()), + RangerContainer.rangerUserName); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, getRangerPasswordKey()), + RangerContainer.rangerPassword); + catalogConf.put( + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(pluginName, getRangerServiceNameKey()), + RangerITEnv.RANGER_HIVE_REPO_NAME); + + metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, "hive", "comment", catalogConf); + catalog = metalake.loadCatalog(catalogName); + LOG.info("Catalog created: {}", catalog); + } + + @Test + public void testChainAuthorization() { + LOG.info(""); + } + + @Override + protected void checkTableAllPrivilegesExceptForCreating() {} + + @Override + protected void checkUpdateSQLWithReadWritePrivileges() {} + + @Override + 
protected void checkUpdateSQLWithReadPrivileges() {} + + @Override + protected void checkUpdateSQLWithWritePrivileges() {} + + @Override + protected void checkDeleteSQLWithReadWritePrivileges() {} + + @Override + protected void checkDeleteSQLWithReadPrivileges() {} + + @Override + protected void checkDeleteSQLWithWritePrivileges() {} + + @Override + protected void useCatalog() throws InterruptedException {} + + @Override + protected void checkWithoutPrivileges() {} + + @Override + protected void testAlterTable() {} +} diff --git a/authorizations/authorization-chain/src/test/resources/log4j2.properties b/authorizations/authorization-chain/src/test/resources/log4j2.properties new file mode 100644 index 00000000000..2a46c57ec2f --- /dev/null +++ b/authorizations/authorization-chain/src/test/resources/log4j2.properties @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# Set to debug or trace if log4j initialization is failing +status = info + +# Name of the configuration +name = ConsoleLogConfig + +# Console appender configuration +appender.console.type = Console +appender.console.name = consoleLogger +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n + +# Log files location +property.logPath = ${sys:gravitino.log.path:-build/authorization-chain-integration-test.log} + +# File appender configuration +appender.file.type = File +appender.file.name = fileLogger +appender.file.fileName = ${logPath} +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Root logger level +rootLogger.level = info + +# Root logger referring to console and file appenders +rootLogger.appenderRef.stdout.ref = consoleLogger +rootLogger.appenderRef.file.ref = fileLogger + +# File appender configuration for testcontainers +appender.testcontainersFile.type = File +appender.testcontainersFile.name = testcontainersLogger +appender.testcontainersFile.fileName = build/testcontainers.log +appender.testcontainersFile.layout.type = PatternLayout +appender.testcontainersFile.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Logger for testcontainers +logger.testcontainers.name = org.testcontainers +logger.testcontainers.level = debug +logger.testcontainers.additivity = false +logger.testcontainers.appenderRef.file.ref = testcontainersLogger + +logger.tc.name = tc +logger.tc.level = debug +logger.tc.additivity = false +logger.tc.appenderRef.file.ref = testcontainersLogger + +logger.docker.name = com.github.dockerjava +logger.docker.level = warn +logger.docker.additivity = false +logger.docker.appenderRef.file.ref = testcontainersLogger + +logger.http.name = com.github.dockerjava.zerodep.shaded.org.apache.hc.client5.http.wire +logger.http.level = off diff --git 
a/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template b/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template new file mode 100644 index 00000000000..eb7f2b5e811 --- /dev/null +++ b/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template @@ -0,0 +1,45 @@ + + + + ranger.plugin.spark.policy.rest.url + __REPLACE__RANGER_ADMIN_URL + + + + ranger.plugin.spark.service.name + __REPLACE__RANGER_HIVE_REPO_NAME + + + + ranger.plugin.spark.policy.cache.dir + /tmp/policycache + + + + ranger.plugin.spark.policy.pollIntervalMs + 500 + + + + ranger.plugin.spark.policy.source.impl + org.apache.ranger.admin.client.RangerAdminRESTClient + + + \ No newline at end of file diff --git a/authorizations/authorization-ranger/build.gradle.kts b/authorizations/authorization-ranger/build.gradle.kts index f83aee72c54..4f7191a284a 100644 --- a/authorizations/authorization-ranger/build.gradle.kts +++ b/authorizations/authorization-ranger/build.gradle.kts @@ -38,7 +38,9 @@ dependencies { implementation(project(":core")) { exclude(group = "*") } - + implementation(project(":catalogs:catalog-common")) { + exclude(group = "*") + } implementation(libs.bundles.log4j) implementation(libs.commons.lang3) implementation(libs.guava) @@ -70,7 +72,7 @@ dependencies { testImplementation(project(":common")) testImplementation(project(":clients:client-java")) testImplementation(project(":server")) - testImplementation(project(":catalogs:catalog-common")) + testImplementation(project(":integration-test-common", "testArtifacts")) testImplementation(libs.junit.jupiter.api) testImplementation(libs.mockito.core) @@ -133,7 +135,7 @@ tasks.test { doFirst { environment("HADOOP_USER_NAME", "gravitino") } - dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars", ":catalogs:catalog-lakehouse-iceberg:jar", ":catalogs:catalog-lakehouse-iceberg:runtimeJars", ":catalogs:catalog-lakehouse-paimon:jar", 
":catalogs:catalog-lakehouse-paimon:runtimeJars") + dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars", ":catalogs:catalog-lakehouse-iceberg:jar", ":catalogs:catalog-lakehouse-iceberg:runtimeJars", ":catalogs:catalog-lakehouse-paimon:jar", ":catalogs:catalog-lakehouse-paimon:runtimeJars", ":authorizations:authorization-chain:jar", ":authorizations:authorization-chain:runtimeJars") val skipITs = project.hasProperty("skipITs") if (skipITs) { @@ -143,3 +145,16 @@ tasks.test { dependsOn(tasks.jar) } } + +val testJar by tasks.registering(Jar::class) { + archiveClassifier.set("tests") + from(sourceSets["test"].output) +} + +configurations { + create("testArtifacts") +} + +artifacts { + add("testArtifacts", testJar) +} diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java index 95dc4f93636..dbd2df49e15 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java @@ -169,6 +169,8 @@ protected void createMetalake() { metalake = loadMetalake; } + public abstract void createCatalog(); + protected static void waitForUpdatingPolicies() throws InterruptedException { // After Ranger authorization, Must wait a period of time for the Ranger Spark plugin to update // the policy Sleep time must be greater than the policy update interval diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java index 
cb41e79216c..fb1949ddb21 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java @@ -170,7 +170,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE); } - private static void createCatalog() { + @Override + public void createCatalog() { Map properties = ImmutableMap.of( HiveConstants.METASTORE_URIS, diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java index 2758d307bad..165f17077fd 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java @@ -55,7 +55,7 @@ public class RangerITEnv { private static final Logger LOG = LoggerFactory.getLogger(RangerITEnv.class); protected static final String RANGER_TRINO_REPO_NAME = "trinoDev"; private static final String RANGER_TRINO_TYPE = "trino"; - protected static final String RANGER_HIVE_REPO_NAME = "hiveDev"; + public static final String RANGER_HIVE_REPO_NAME = "hiveDev"; private static final String RANGER_HIVE_TYPE = "hive"; protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev"; private static final String RANGER_HDFS_TYPE = "hdfs"; diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java index 7b45eda7a6e..54241fead3a 100644 --- 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java @@ -167,7 +167,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE_BACK); } - private static void createCatalog() { + @Override + public void createCatalog() { Map properties = ImmutableMap.of( IcebergConstants.URI, diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java index 7cb600b9d8c..6cb91f4205a 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java @@ -183,7 +183,8 @@ protected void testAlterTable() { sparkSession.sql(SQL_ALTER_TABLE_BACK); } - private static void createCatalog() { + @Override + public void createCatalog() { Map properties = ImmutableMap.of( "uri", diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java index dc532e6014d..4af036f9b4a 100644 --- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java +++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java @@ -110,7 +110,7 @@ public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata { DEFAULT_LIST_ALL_TABLES, false /* hidden */, false /* reserved */)) - 
.putAll(AuthorizationPropertiesMeta.RANGER_AUTHORIZATION_PROPERTY_ENTRIES) + .putAll(AuthorizationPropertiesMeta.AUTHORIZATION_PROPERTY_ENTRIES) .putAll(CLIENT_PROPERTIES_METADATA.propertyEntries()) .build(); diff --git a/core/build.gradle.kts b/core/build.gradle.kts index b09c0e35889..4579ef3a02c 100644 --- a/core/build.gradle.kts +++ b/core/build.gradle.kts @@ -73,3 +73,16 @@ tasks.test { environment("GRAVITINO_HOME", project.rootDir.path + "/distribution/package") } } + +val testJar by tasks.registering(Jar::class) { + archiveClassifier.set("tests") + from(sourceSets["test"].output) +} + +configurations { + create("testArtifacts") +} + +artifacts { + add("testArtifacts", testJar) +} diff --git a/core/src/main/java/org/apache/gravitino/catalog/PropertiesMetadataHelpers.java b/core/src/main/java/org/apache/gravitino/catalog/PropertiesMetadataHelpers.java index 54320c325c7..286a063600e 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/PropertiesMetadataHelpers.java +++ b/core/src/main/java/org/apache/gravitino/catalog/PropertiesMetadataHelpers.java @@ -26,6 +26,7 @@ import java.util.stream.Collectors; import org.apache.gravitino.connector.PropertiesMetadata; import org.apache.gravitino.connector.PropertyEntry; +import org.apache.gravitino.connector.WildcardPropertiesMeta; /** This class contains helper methods for properties metadata. 
*/ public class PropertiesMetadataHelpers { @@ -67,6 +68,8 @@ public static void validatePropertyForCreate( "Properties are required and must be set: %s", absentProperties); + WildcardPropertiesMeta.validate(propertiesMetadata, properties); + // use decode function to validate the property values for (Map.Entry entry : properties.entrySet()) { String key = entry.getKey(); diff --git a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java b/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java index e1b389f7ca3..69242e1b397 100644 --- a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java +++ b/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java @@ -21,25 +21,166 @@ import com.google.common.collect.ImmutableMap; import java.util.Map; -public class AuthorizationPropertiesMeta { +public class AuthorizationPropertiesMeta extends BasePropertiesMetadata + implements WildcardPropertiesMeta { + private static volatile AuthorizationPropertiesMeta instance = null; + + public static synchronized AuthorizationPropertiesMeta getInstance() { + if (instance == null) { + synchronized (AuthorizationPropertiesMeta.class) { + if (instance == null) { + instance = new AuthorizationPropertiesMeta(); + } + } + } + return instance; + } + /** Ranger admin web URIs */ - public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url"; + private static final String RANGER_ADMIN_URL_KEY = "ranger.admin.url"; + + public static final String getRangerAdminUrlKey() { + return RANGER_ADMIN_URL_KEY; + } + + public static final String RANGER_ADMIN_URL = + AuthorizationPropertiesMeta.getInstance().generateFirstNodePropertyKey(RANGER_ADMIN_URL_KEY); /** Ranger authentication type kerberos or simple */ - public static final String RANGER_AUTH_TYPE = "authorization.ranger.auth.type"; + private static final String RANGER_AUTH_TYPE_KEY = "ranger.auth.type"; + + public static 
final String getRangerAuthTypeKey() { + return RANGER_AUTH_TYPE_KEY; + } + + public static final String RANGER_AUTH_TYPE = + AuthorizationPropertiesMeta.getInstance().generateFirstNodePropertyKey(RANGER_AUTH_TYPE_KEY); /** * Ranger admin web login username(auth_type=simple), or kerberos principal(auth_type=kerberos) */ - public static final String RANGER_USERNAME = "authorization.ranger.username"; + private static final String RANGER_USERNAME_KEY = "ranger.username"; + + public static final String getRangerUsernameKey() { + return RANGER_USERNAME_KEY; + } + + public static final String RANGER_USERNAME = + AuthorizationPropertiesMeta.getInstance().generateFirstNodePropertyKey(RANGER_USERNAME_KEY); /** * Ranger admin web login user password(auth_type=simple), or path of the keytab * file(auth_type=kerberos) */ - public static final String RANGER_PASSWORD = "authorization.ranger.password"; + private static final String RANGER_PASSWORD_KEY = "ranger.password"; + + public static final String getRangerPasswordKey() { + return RANGER_PASSWORD_KEY; + } + + public static final String RANGER_PASSWORD = + AuthorizationPropertiesMeta.getInstance().generateFirstNodePropertyKey(RANGER_PASSWORD_KEY); + /** Ranger service name */ - public static final String RANGER_SERVICE_NAME = "authorization.ranger.service.name"; + private static final String RANGER_SERVICE_NAME_KEY = "ranger.service.name"; + + public static final String getRangerServiceNameKey() { + return RANGER_SERVICE_NAME_KEY; + } + + public static final String RANGER_SERVICE_NAME = + AuthorizationPropertiesMeta.getInstance() + .generateFirstNodePropertyKey(RANGER_SERVICE_NAME_KEY); + + /** Chain authorization plugin provider */ + private static final String CHAIN_CATALOG_PROVIDER_KEY = "catalog-provider"; + + public static final String getChainCatalogProviderKey() { + return CHAIN_CATALOG_PROVIDER_KEY; + } + + public static final String CHAIN_CATALOG_PROVIDER = + AuthorizationPropertiesMeta.getInstance() + 
.getPropertyValue(Constants.WILDCARD, CHAIN_CATALOG_PROVIDER_KEY); + + /** Chain authorization plugin provider */ + private static final String CHAIN_PROVIDER_KEY = "provider"; - public static final Map> RANGER_AUTHORIZATION_PROPERTY_ENTRIES = + public static final String getChainProviderKey() { + return CHAIN_PROVIDER_KEY; + } + + public static final String CHAIN_PROVIDER = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, CHAIN_PROVIDER_KEY); + /** Chain authorization Ranger admin web URIs */ + public static final String CHAIN_RANGER_ADMIN_URL = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, RANGER_ADMIN_URL_KEY); + /** Chain authorization Ranger authentication type kerberos or simple */ + public static final String CHAIN_RANGER_AUTH_TYPES = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, RANGER_AUTH_TYPE_KEY); + /** Chain authorization Ranger username */ + public static final String CHAIN_RANGER_USERNAME = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, RANGER_USERNAME_KEY); + /** + * Chain authorization Ranger admin web login user password(auth_type=simple), or path of the + * keytab file(auth_type=kerberos) + */ + public static final String CHAIN_RANGER_PASSWORD = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, RANGER_PASSWORD_KEY); + /** Chain authorization Ranger service name */ + public static final String CHAIN_RANGER_SERVICE_NAME = + AuthorizationPropertiesMeta.getInstance() + .getPropertyValue(Constants.WILDCARD, RANGER_SERVICE_NAME_KEY); + + public static String chainKeyToPluginKey(String chainKey, String plugin) { + return chainKey.replace( + String.format( + "%s.%s", AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey(), plugin), + AuthorizationPropertiesMeta.getInstance().firstNodeName()); + } + + public static final Map> AUTHORIZATION_PROPERTY_ENTRIES = 
ImmutableMap.>builder() + .put( + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey(), + PropertyEntry.wildcardPropertyEntry( + AuthorizationPropertiesMeta.getInstance().wildcardNodePropertyKey(), + "The Chain authorization plugins")) + .put( + CHAIN_CATALOG_PROVIDER, + PropertyEntry.wildcardPropertyEntry( + CHAIN_PROVIDER, "The Chain sub entity catalog provider")) + .put( + CHAIN_PROVIDER, + PropertyEntry.wildcardPropertyEntry( + CHAIN_PROVIDER, "The Chain sub entity authorization plugin provider")) + .put( + CHAIN_RANGER_SERVICE_NAME, + PropertyEntry.wildcardPropertyEntry( + CHAIN_RANGER_SERVICE_NAME, + "The Chain sub entity authorization Ranger service name")) + .put( + CHAIN_RANGER_ADMIN_URL, + PropertyEntry.wildcardPropertyEntry( + CHAIN_RANGER_ADMIN_URL, + "The Chain sub entity authorization Ranger admin web URIs")) + .put( + CHAIN_RANGER_AUTH_TYPES, + PropertyEntry.wildcardPropertyEntry( + CHAIN_RANGER_AUTH_TYPES, + "The Chain sub entity authorization Ranger admin web auth type (kerberos/simple)")) + .put( + CHAIN_RANGER_USERNAME, + PropertyEntry.wildcardPropertyEntry( + CHAIN_RANGER_USERNAME, + "The Chain sub entity authorization Ranger admin web login username")) + .put( + CHAIN_RANGER_PASSWORD, + PropertyEntry.wildcardPropertyEntry( + CHAIN_RANGER_PASSWORD, + "The Chain sub entity authorization Ranger admin web login password")) .put( RANGER_SERVICE_NAME, PropertyEntry.stringOptionalPropertyEntry( @@ -65,4 +206,24 @@ public class AuthorizationPropertiesMeta { PropertyEntry.stringOptionalPropertyEntry( RANGER_PASSWORD, "The Ranger admin web login password", true, null, false)) .build(); + + @Override + protected Map> specificPropertyEntries() { + return AUTHORIZATION_PROPERTY_ENTRIES; + } + + @Override + public String firstNodeName() { + return "authorization"; + } + + @Override + public String secondNodeName() { + return "chain"; + } + + @Override + public String wildcardNodeName() { + return "plugins"; + } } diff --git 
a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java index 07bc83b623b..9f14b86ccdb 100644 --- a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java +++ b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java @@ -19,22 +19,16 @@ package org.apache.gravitino.connector; import com.google.common.base.Preconditions; -import com.google.common.collect.Iterables; import com.google.common.collect.Maps; -import com.google.common.collect.Streams; import java.io.Closeable; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.ServiceLoader; -import java.util.stream.Collectors; import org.apache.gravitino.Audit; import org.apache.gravitino.Catalog; import org.apache.gravitino.CatalogProvider; import org.apache.gravitino.annotation.Evolving; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -import org.apache.gravitino.connector.authorization.AuthorizationProvider; import org.apache.gravitino.connector.authorization.BaseAuthorization; import org.apache.gravitino.connector.capability.Capability; import org.apache.gravitino.meta.CatalogEntity; @@ -203,28 +197,7 @@ public void initAuthorizationPluginInstance(IsolatedClassLoader classLoader) { authorization = classLoader.withClassLoader( cl -> { - try { - ServiceLoader loader = - ServiceLoader.load(AuthorizationProvider.class, cl); - - List> providers = - Streams.stream(loader.iterator()) - .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) - .map(AuthorizationProvider::getClass) - .collect(Collectors.toList()); - if (providers.isEmpty()) { - throw new IllegalArgumentException( - "No authorization provider found for: " + authorizationProvider); - } else if (providers.size() > 1) { - throw new IllegalArgumentException( - "Multiple authorization providers found for: " + authorizationProvider); - } - return 
(BaseAuthorization) - Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance(); - } catch (Exception e) { - LOG.error("Failed to create authorization instance", e); - throw new RuntimeException(e); - } + return BaseAuthorization.createAuthorization(cl, authorizationProvider); }); } catch (Exception e) { LOG.error("Failed to load authorization with class loader", e); diff --git a/core/src/main/java/org/apache/gravitino/connector/PropertiesMetadata.java b/core/src/main/java/org/apache/gravitino/connector/PropertiesMetadata.java index d4778b2ff90..c1d7a7d7d01 100644 --- a/core/src/main/java/org/apache/gravitino/connector/PropertiesMetadata.java +++ b/core/src/main/java/org/apache/gravitino/connector/PropertiesMetadata.java @@ -50,6 +50,17 @@ default boolean isRequiredProperty(String propertyName) { && propertyEntries().get(propertyName).isRequired(); } + /** + * Check if the property is a wildcard property. + * + * @param propertyName The name of the property. + * @return true if the property exists and is a wildcard property, false otherwise. + */ + default boolean isWildcardProperty(String propertyName) { + return propertyEntries().containsKey(propertyName) + && propertyEntries().get(propertyName).isWildcard(); + } + /** * Check if the property is immutable. 
* diff --git a/core/src/main/java/org/apache/gravitino/connector/PropertyEntry.java b/core/src/main/java/org/apache/gravitino/connector/PropertyEntry.java index b4c788a60d8..4c606c27be9 100644 --- a/core/src/main/java/org/apache/gravitino/connector/PropertyEntry.java +++ b/core/src/main/java/org/apache/gravitino/connector/PropertyEntry.java @@ -39,6 +39,7 @@ public final class PropertyEntry { private final Function encoder; private final boolean hidden; private final boolean reserved; + private final boolean wildcard; /** * @param name The name of the property @@ -64,7 +65,8 @@ private PropertyEntry( Function decoder, Function encoder, boolean hidden, - boolean reserved) { + boolean reserved, + boolean wildcard) { Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be null or empty"); Preconditions.checkArgument( StringUtils.isNotBlank(description), "description cannot be null or empty"); @@ -87,6 +89,7 @@ private PropertyEntry( this.encoder = encoder; this.hidden = hidden; this.reserved = reserved; + this.wildcard = wildcard; } public static class Builder { @@ -100,6 +103,7 @@ public static class Builder { private Function encoder; private boolean hidden; private boolean reserved; + private boolean wildcard; public Builder withName(String name) { this.name = name; @@ -151,6 +155,11 @@ public Builder withReserved(boolean reserved) { return this; } + public Builder withWildcard(boolean wildcard) { + this.wildcard = wildcard; + return this; + } + public PropertyEntry build() { return new PropertyEntry( name, @@ -162,7 +171,8 @@ public PropertyEntry build() { decoder, encoder, hidden, - reserved); + reserved, + wildcard); } } @@ -268,6 +278,22 @@ public static PropertyEntry booleanReservedPropertyEntry( return booleanPropertyEntry(name, description, false, true, defaultValue, hidden, true); } + public static PropertyEntry wildcardPropertyEntry(String name, String description) { + return new Builder() + .withName(name) + .withDescription(description) 
+ .withRequired(false) + .withImmutable(false) + .withJavaType(String.class) + .withDefaultValue(null) + .withDecoder(Function.identity()) + .withEncoder(Function.identity()) + .withHidden(false) + .withReserved(false) + .withWildcard(true) + .build(); + } + public static PropertyEntry booleanPropertyEntry( String name, String description, diff --git a/core/src/main/java/org/apache/gravitino/connector/WildcardPropertiesMeta.java b/core/src/main/java/org/apache/gravitino/connector/WildcardPropertiesMeta.java new file mode 100644 index 00000000000..943bfc517de --- /dev/null +++ b/core/src/main/java/org/apache/gravitino/connector/WildcardPropertiesMeta.java @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.connector; + +import com.google.common.base.Preconditions; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * WildcardPropertiesMeta is a utility class to validate wildcard properties in the properties + * metadata.
+ *
+ * WildcardPropertiesMeta interface defines:
+ * FirstNode.SecondNode.WildcardNode = ""
+ * FirstNode.SecondNode.*.property-key1 = ""
+ * FirstNode.SecondNode.*.property-key2 = ""
+ *
+ * Use define a WildcardPropertiesMeta object:
+ * FirstNode.SecondNode.WildcardNode = "WildcardValue1,WildcardValue2"
+ * FirstNode.SecondNode.{WildcardValue1}.property-key1 = "WildcardValue1 property-key1 value"
+ * FirstNode.SecondNode.{WildcardValue1}.property-key2 = "WildcardValue1 property-key2 value"
+ * FirstNode.SecondNode.{WildcardValue2}.property-key1 = "WildcardValue2 property-key1 value"
+ * FirstNode.SecondNode.{WildcardValue2}.property-key2 = "WildcardValue2 property-key2 value"
+ *
+ * Configuration Example: {@link AuthorizationPropertiesMeta}
+ * "authorization.chain.plugins" = "hive1,hdfs1"
+ * "authorization.chain.hive1.provider" = "ranger";
+ * "authorization.chain.hive1.catalog-provider" = "hive";
+ * "authorization.chain.hive1.ranger.auth.type" = "simple";
+ * "authorization.chain.hive1.ranger.admin.url" = "http://localhost:6080";
+ * "authorization.chain.hive1.ranger.username" = "admin";
+ * "authorization.chain.hive1.ranger.password" = "admin";
+ * "authorization.chain.hive1.ranger.service.name" = "hiveDev";
+ * "authorization.chain.hdfs1.provider" = "ranger";
+ * "authorization.chain.hdfs1.catalog-provider" = "hadoop";
+ * "authorization.chain.hdfs1.ranger.auth.type" = "simple";
+ * "authorization.chain.hdfs1.ranger.admin.url" = "http://localhost:6080";
+ * "authorization.chain.hdfs1.ranger.username" = "admin";
+ * "authorization.chain.hdfs1.ranger.password" = "admin";
+ * "authorization.chain.hdfs1.ranger.service.name" = "hdfsDev";
+ */ +public interface WildcardPropertiesMeta { + class Constants { + public static final String WILDCARD = "*"; + public static final String WILDCARD_CONFIG_VALUES_SPLITTER = ","; + } + + /** The FirstNode define name */ + String firstNodeName(); + /** The SecondNode define name */ + String secondNodeName(); + /** The WildcardNode define name */ + String wildcardNodeName(); + /** Generate FirstNode properties key name */ + default String generateFirstNodePropertyKey(String key) { + return String.format("%s.%s", firstNodeName(), key); + } + /** The `FirstNode.SecondNode` property key name */ + default String secondNodePropertyKey() { + return String.format("%s.%s", firstNodeName(), secondNodeName()); + } + /** The `FirstNode.SecondNode.WildcardNode` properties key name */ + default String wildcardNodePropertyKey() { + return String.format("%s.%s.%s", firstNodeName(), secondNodeName(), wildcardNodeName()); + } + /** Get the property value by wildcard value and property key */ + default String getPropertyValue(String wildcardValue, String propertyKey) { + return String.format( + "%s.%s.%s.%s", firstNodeName(), secondNodeName(), wildcardValue, propertyKey); + } + + /** + * Validate the wildcard properties in the properties metadata. 
+ * + * @param propertiesMetadata the properties metadata + * @param properties the properties + * @throws IllegalArgumentException if the wildcard properties are not valid + */ + static void validate(PropertiesMetadata propertiesMetadata, Map properties) + throws IllegalArgumentException { + // Get all wildcard properties from PropertiesMetadata + List wildcardProperties = + propertiesMetadata.propertyEntries().keySet().stream() + .filter(propertiesMetadata::isWildcardProperty) + .collect(Collectors.toList()); + if (wildcardProperties.size() > 0) { + String wildcardNodePropertyKey = + ((WildcardPropertiesMeta) propertiesMetadata).wildcardNodePropertyKey(); + String wildcardNodePropertyValues = properties.get(wildcardNodePropertyKey); + Preconditions.checkArgument( + wildcardNodePropertyValues != null, + "Wildcard properties `%s` not found in the properties", + wildcardNodePropertyKey); + + // Get the wildcard values from the properties + List wildcardValues = + Arrays.stream( + properties + .get(wildcardNodePropertyKey) + .split(Constants.WILDCARD_CONFIG_VALUES_SPLITTER)) + .map(String::trim) + .collect(Collectors.toList()); + wildcardValues.stream() + .filter(v -> v.contains(".")) + .forEach( + v -> { + throw new IllegalArgumentException( + String.format( + "Wildcard property values cannot be set with `.` character in the `%s = %s`.", + wildcardNodePropertyKey, properties.get(wildcardNodePropertyKey))); + }); + Preconditions.checkArgument( + wildcardValues.size() == wildcardValues.stream().distinct().count(), + "Duplicate values in wildcard config values: %s", + wildcardValues); + + // Get all wildcard properties with wildcard values + List patterns = + wildcardProperties.stream() + .filter(k -> k.contains(Constants.WILDCARD)) + .collect(Collectors.toList()) + .stream() + .map(wildcard -> wildcard.replace(".", "\\.").replace(Constants.WILDCARD, "([^.]+)")) + .map(Pattern::compile) + .collect(Collectors.toList()); + + String secondNodePropertyKey = + 
((WildcardPropertiesMeta) propertiesMetadata).secondNodePropertyKey(); + for (String key : + properties.keySet().stream() + .filter( + k -> !k.equals(wildcardNodePropertyKey) && k.startsWith(secondNodePropertyKey)) + .collect(Collectors.toList())) { + boolean matches = + patterns.stream() + .anyMatch( + pattern -> { + Matcher matcher = pattern.matcher(key); + if (matcher.find()) { + String group = matcher.group(1); + return wildcardValues.contains(group); + } else { + return false; + } + }); + Preconditions.checkArgument( + matches, + "Wildcard properties `%s` not a valid wildcard config with values: %s", + key, + wildcardValues); + } + } + } +} diff --git a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java index ce460e675e1..30b983c31d4 100644 --- a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java +++ b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java @@ -18,9 +18,14 @@ */ package org.apache.gravitino.connector.authorization; +import com.google.common.collect.Iterables; +import com.google.common.collect.Streams; import java.io.Closeable; import java.io.IOException; +import java.util.List; import java.util.Map; +import java.util.ServiceLoader; +import java.util.stream.Collectors; /** * The abstract base class for Authorization implementations.
@@ -58,6 +63,31 @@ public AuthorizationPlugin plugin( return plugin; } + public static BaseAuthorization createAuthorization( + ClassLoader classLoader, String authorizationProvider) { + try { + ServiceLoader loader = + ServiceLoader.load(AuthorizationProvider.class, classLoader); + + List> providers = + Streams.stream(loader.iterator()) + .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) + .map(AuthorizationProvider::getClass) + .collect(Collectors.toList()); + if (providers.isEmpty()) { + throw new IllegalArgumentException( + "No authorization provider found for: " + authorizationProvider); + } else if (providers.size() > 1) { + throw new IllegalArgumentException( + "Multiple authorization providers found for: " + authorizationProvider); + } + return (BaseAuthorization) + Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + @Override public void close() throws IOException { if (plugin != null) { diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java index 554ef0cec8b..084edb693f5 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java @@ -24,8 +24,10 @@ import org.apache.gravitino.Catalog; import org.apache.gravitino.Namespace; import org.apache.gravitino.TestCatalog; -import org.apache.gravitino.connector.authorization.mysql.TestMySQLAuthorizationPlugin; -import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationPlugin; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFS; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFSPlugin; +import 
org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQL; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQLPlugin; import org.apache.gravitino.meta.AuditInfo; import org.apache.gravitino.meta.CatalogEntity; import org.apache.gravitino.utils.IsolatedClassLoader; @@ -35,7 +37,7 @@ public class TestAuthorization { private static TestCatalog hiveCatalog; - private static TestCatalog mySQLCatalog; + private static TestCatalog filesetCatalog; @BeforeAll public static void setUp() throws Exception { @@ -54,49 +56,53 @@ public static void setUp() throws Exception { hiveCatalog = new TestCatalog() - .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "ranger")) + .withCatalogConf( + ImmutableMap.of( + Catalog.AUTHORIZATION_PROVIDER, TestRangerAuthorizationHadoopSQL.SHORT_NAME)) .withCatalogEntity(hiveCatalogEntity); IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader( Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader); - CatalogEntity mySQLEntity = + CatalogEntity filesetEntity = CatalogEntity.builder() .withId(2L) .withName("catalog-test2") .withNamespace(Namespace.of("default")) - .withType(Catalog.Type.RELATIONAL) + .withType(Catalog.Type.FILESET) .withProvider("test") .withAuditInfo(auditInfo) .build(); - mySQLCatalog = + filesetCatalog = new TestCatalog() - .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "mysql")) - .withCatalogEntity(mySQLEntity); - mySQLCatalog.initAuthorizationPluginInstance(isolatedClassLoader); + .withCatalogConf( + ImmutableMap.of( + Catalog.AUTHORIZATION_PROVIDER, TestRangerAuthorizationHDFS.SHORT_NAME)) + .withCatalogEntity(filesetEntity); + filesetCatalog.initAuthorizationPluginInstance(isolatedClassLoader); } @Test - public void testRangerAuthorization() { - AuthorizationPlugin rangerAuthPlugin = hiveCatalog.getAuthorizationPlugin(); - 
Assertions.assertInstanceOf(TestRangerAuthorizationPlugin.class, rangerAuthPlugin); - TestRangerAuthorizationPlugin testRangerAuthPlugin = - (TestRangerAuthorizationPlugin) rangerAuthPlugin; - Assertions.assertFalse(testRangerAuthPlugin.callOnCreateRole1); - rangerAuthPlugin.onRoleCreated(null); - Assertions.assertTrue(testRangerAuthPlugin.callOnCreateRole1); + public void testRangerHadoopSQLAuthorization() { + AuthorizationPlugin rangerHiveAuthPlugin = hiveCatalog.getAuthorizationPlugin(); + Assertions.assertInstanceOf(TestRangerAuthorizationHadoopSQLPlugin.class, rangerHiveAuthPlugin); + TestRangerAuthorizationHadoopSQLPlugin testRangerAuthHadoopSQLPlugin = + (TestRangerAuthorizationHadoopSQLPlugin) rangerHiveAuthPlugin; + Assertions.assertFalse(testRangerAuthHadoopSQLPlugin.callOnCreateRole1); + rangerHiveAuthPlugin.onRoleCreated(null); + Assertions.assertTrue(testRangerAuthHadoopSQLPlugin.callOnCreateRole1); } @Test - public void testMySQLAuthorization() { - AuthorizationPlugin mySQLAuthPlugin = mySQLCatalog.getAuthorizationPlugin(); - Assertions.assertInstanceOf(TestMySQLAuthorizationPlugin.class, mySQLAuthPlugin); - TestMySQLAuthorizationPlugin testMySQLAuthPlugin = - (TestMySQLAuthorizationPlugin) mySQLAuthPlugin; - Assertions.assertFalse(testMySQLAuthPlugin.callOnCreateRole2); - mySQLAuthPlugin.onRoleCreated(null); - Assertions.assertTrue(testMySQLAuthPlugin.callOnCreateRole2); + public void testRangerHDFSAuthorization() { + AuthorizationPlugin rangerHDFSAuthPlugin = filesetCatalog.getAuthorizationPlugin(); + Assertions.assertInstanceOf(TestRangerAuthorizationHDFSPlugin.class, rangerHDFSAuthPlugin); + TestRangerAuthorizationHDFSPlugin testRangerAuthHDFSPlugin = + (TestRangerAuthorizationHDFSPlugin) rangerHDFSAuthPlugin; + Assertions.assertFalse(testRangerAuthHDFSPlugin.callOnCreateRole2); + rangerHDFSAuthPlugin.onRoleCreated(null); + Assertions.assertTrue(testRangerAuthHDFSPlugin.callOnCreateRole2); } } diff --git 
a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFS.java similarity index 81% rename from core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFS.java index 383339d0847..040fd9815b9 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFS.java @@ -22,18 +22,19 @@ import org.apache.gravitino.connector.authorization.AuthorizationPlugin; import org.apache.gravitino.connector.authorization.BaseAuthorization; -public class TestRangerAuthorization extends BaseAuthorization { +public class TestRangerAuthorizationHDFS extends BaseAuthorization { + public static final String SHORT_NAME = "test_ranger_hdfs"; - public TestRangerAuthorization() {} + public TestRangerAuthorizationHDFS() {} @Override public String shortName() { - return "ranger"; + return SHORT_NAME; } @Override protected AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config) { - return new TestRangerAuthorizationPlugin(); + return new TestRangerAuthorizationHDFSPlugin(); } } diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java similarity index 95% rename from core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java index e078eda410e..fdc28f8143e 100644 --- 
a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.gravitino.connector.authorization.mysql; +package org.apache.gravitino.connector.authorization.ranger; import java.io.IOException; import java.util.List; @@ -29,7 +29,7 @@ import org.apache.gravitino.authorization.User; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -public class TestMySQLAuthorizationPlugin implements AuthorizationPlugin { +public class TestRangerAuthorizationHDFSPlugin implements AuthorizationPlugin { public boolean callOnCreateRole2 = false; @Override diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQL.java similarity index 75% rename from core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQL.java index db7c629bbd5..01482f75590 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQL.java @@ -16,24 +16,27 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.apache.gravitino.connector.authorization.mysql; +package org.apache.gravitino.connector.authorization.ranger; import java.util.Map; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; import org.apache.gravitino.connector.authorization.BaseAuthorization; -public class TestMySQLAuthorization extends BaseAuthorization { +public class TestRangerAuthorizationHadoopSQL + extends BaseAuthorization { - public TestMySQLAuthorization() {} + public static final String SHORT_NAME = "test_ranger_hadoop_sql"; + + public TestRangerAuthorizationHadoopSQL() {} @Override public String shortName() { - return "mysql"; + return SHORT_NAME; } @Override protected AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config) { - return new TestMySQLAuthorizationPlugin(); + return new TestRangerAuthorizationHadoopSQLPlugin(); } } diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java similarity index 97% rename from core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java index 8a68f825d0e..10dbe521e6c 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java @@ -29,7 +29,7 @@ import org.apache.gravitino.authorization.User; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -public class TestRangerAuthorizationPlugin implements AuthorizationPlugin { +public class TestRangerAuthorizationHadoopSQLPlugin implements AuthorizationPlugin { public boolean callOnCreateRole1 = false; 
@Override diff --git a/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider b/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider index e49cb8937e0..fc878c208ad 100644 --- a/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider +++ b/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider @@ -16,5 +16,5 @@ # specific language governing permissions and limitations # under the License. # -org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorization -org.apache.gravitino.connector.authorization.mysql.TestMySQLAuthorization \ No newline at end of file +org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQL +org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFS \ No newline at end of file diff --git a/settings.gradle.kts b/settings.gradle.kts index a36fde93cd3..8b561cff043 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -56,7 +56,7 @@ if (gradle.startParameter.projectProperties["enableFuse"]?.toBoolean() ?: false) } include("iceberg:iceberg-common") include("iceberg:iceberg-rest-server") -include("authorizations:authorization-ranger") +include("authorizations:authorization-ranger", "authorizations:authorization-chain") include("trino-connector:trino-connector", "trino-connector:integration-test") include("spark-connector:spark-common") // kyuubi hive connector doesn't support 2.13 for Spark3.3