diff --git a/.github/workflows/access-control-integration-test.yml b/.github/workflows/access-control-integration-test.yml index 54ffde2ee82..dc8acd60678 100644 --- a/.github/workflows/access-control-integration-test.yml +++ b/.github/workflows/access-control-integration-test.yml @@ -87,9 +87,9 @@ jobs: - name: Authorization Integration Test (JDK${{ matrix.java-version }}) id: integrationTest run: | - ./gradlew -PtestMode=embedded -PjdbcBackend=h2 -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test - ./gradlew -PtestMode=deploy -PjdbcBackend=mysql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test - ./gradlew -PtestMode=deploy -PjdbcBackend=postgresql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:authorization-ranger:test + ./gradlew -PtestMode=embedded -PjdbcBackend=h2 -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:test + ./gradlew -PtestMode=deploy -PjdbcBackend=mysql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:test + ./gradlew -PtestMode=deploy -PjdbcBackend=postgresql -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :authorizations:test - name: Upload integrate tests reports uses: actions/upload-artifact@v3 diff --git a/authorizations/authorization-chain/build.gradle.kts b/authorizations/authorization-chain/build.gradle.kts new file mode 100644 index 00000000000..1dae6d91b95 --- /dev/null +++ b/authorizations/authorization-chain/build.gradle.kts @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +description = "authorization-chain" + +plugins { + `maven-publish` + id("java") + id("idea") +} + +val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString() +val sparkVersion: String = libs.versions.spark35.get() +val kyuubiVersion: String = libs.versions.kyuubi4paimon.get() +val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".") + +dependencies { + implementation(project(":api")) { + exclude(group = "*") + } + implementation(project(":core")) { + exclude(group = "*") + } + implementation(project(":common")) { + exclude(group = "*") + } + implementation(project(":authorizations:authorization-common")) { + exclude(group = "*") + } + implementation(libs.bundles.log4j) + implementation(libs.commons.lang3) + implementation(libs.guava) + implementation(libs.javax.jaxb.api) { + exclude("*") + } + implementation(libs.javax.ws.rs.api) + implementation(libs.jettison) + compileOnly(libs.lombok) + implementation(libs.rome) + + testImplementation(project(":core")) + testImplementation(project(":clients:client-java")) + testImplementation(project(":server")) + testImplementation(project(":catalogs:catalog-common")) + testImplementation(project(":integration-test-common", "testArtifacts")) + testImplementation(project(":authorizations:authorization-ranger")) + 
testImplementation(project(":authorizations:authorization-ranger", "testArtifacts")) + testImplementation(libs.junit.jupiter.api) + testImplementation(libs.mockito.core) + testImplementation(libs.testcontainers) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.mysql.driver) + testImplementation(libs.postgresql.driver) + testImplementation(libs.ranger.intg) { + exclude("org.apache.hadoop", "hadoop-common") + exclude("org.apache.hive", "hive-storage-api") + exclude("org.apache.lucene") + exclude("org.apache.solr") + exclude("org.apache.kafka") + exclude("org.elasticsearch") + exclude("org.elasticsearch.client") + exclude("org.elasticsearch.plugin") + exclude("org.apache.ranger", "ranger-plugins-audit") + exclude("org.apache.ranger", "ranger-plugins-cred") + exclude("org.apache.ranger", "ranger-plugin-classloader") + exclude("net.java.dev.jna") + exclude("javax.ws.rs") + exclude("org.eclipse.jetty") + } + testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") + testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion") { + exclude("org.apache.avro") + exclude("org.apache.hadoop") + exclude("org.apache.zookeeper") + exclude("io.dropwizard.metrics") + exclude("org.rocksdb") + } + testImplementation("org.apache.kyuubi:kyuubi-spark-authz-shaded_$scalaVersion:$kyuubiVersion") { + exclude("com.sun.jersey") + } + testImplementation(libs.hadoop3.client) + testImplementation(libs.hadoop3.common) { + exclude("com.sun.jersey") + exclude("javax.servlet", "servlet-api") + } + testImplementation(libs.hadoop3.hdfs) { + exclude("com.sun.jersey") + exclude("javax.servlet", "servlet-api") + exclude("io.netty") + } +} + +tasks { + val runtimeJars by registering(Copy::class) { + from(configurations.runtimeClasspath) + into("build/libs") + } + + val copyAuthorizationLibs by registering(Copy::class) { + dependsOn("jar", runtimeJars) + from("build/libs") { + exclude("guava-*.jar") + exclude("log4j-*.jar") + 
exclude("slf4j-*.jar") + } + into("$rootDir/distribution/package/authorizations/chain/libs") + } + + register("copyLibAndConfig", Copy::class) { + dependsOn(copyAuthorizationLibs) + } + + jar { + dependsOn(runtimeJars) + } +} + +tasks.test { + doFirst { + environment("HADOOP_USER_NAME", "gravitino") + } + dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars", ":authorizations:authorization-ranger:jar", ":authorizations:authorization-ranger:runtimeJars") + + val skipITs = project.hasProperty("skipITs") + if (skipITs) { + // Exclude integration tests + exclude("**/integration/test/**") + } else { + dependsOn(tasks.jar) + } +} diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java similarity index 71% rename from core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java rename to authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java index e8d747da11f..12980497710 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorization.java +++ b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorization.java @@ -16,24 +16,27 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.gravitino.connector.authorization.mysql; +package org.apache.gravitino.authorization.chain; import java.util.Map; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; import org.apache.gravitino.connector.authorization.BaseAuthorization; -public class TestMySQLAuthorization extends BaseAuthorization { - - public TestMySQLAuthorization() {} - +/** Implementation of a Chain authorization in Gravitino. 
*/ +public class ChainAuthorization extends BaseAuthorization { @Override public String shortName() { - return "mysql"; + return "chain"; } @Override public AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map config) { - return new TestMySQLAuthorizationPlugin(); + switch (catalogProvider) { + case "hive": + return new ChainAuthorizationPlugin(metalake, catalogProvider, config); + default: + throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider); + } } } diff --git a/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java new file mode 100644 index 00000000000..d0e13942588 --- /dev/null +++ b/authorizations/authorization-chain/src/main/java/org/apache/gravitino/authorization/chain/ChainAuthorizationPlugin.java @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.gravitino.authorization.chain; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.MetadataObject; +import org.apache.gravitino.authorization.ChainAuthorizationProperties; +import org.apache.gravitino.authorization.Group; +import org.apache.gravitino.authorization.MetadataObjectChange; +import org.apache.gravitino.authorization.Owner; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; +import org.apache.gravitino.authorization.Role; +import org.apache.gravitino.authorization.RoleChange; +import org.apache.gravitino.authorization.User; +import org.apache.gravitino.connector.authorization.AuthorizationPlugin; +import org.apache.gravitino.connector.authorization.BaseAuthorization; +import org.apache.gravitino.exceptions.AuthorizationPluginException; +import org.apache.gravitino.utils.IsolatedClassLoader; + +/** Chain authorization operations plugin class.
*/ +public class ChainAuthorizationPlugin implements AuthorizationPlugin { + private List plugins = Lists.newArrayList(); + private final String metalake; + + public ChainAuthorizationPlugin( + String metalake, String catalogProvider, Map config) { + this.metalake = metalake; + initPlugins(catalogProvider, config); + } + + private void initPlugins(String catalogProvider, Map properties) { + ChainAuthorizationProperties.validate(properties); + // Validate the properties for each plugin + ChainAuthorizationProperties.plugins(properties) + .forEach( + pluginName -> { + Map pluginProperties = + ChainAuthorizationProperties.fetchAuthPluginProperties(pluginName, properties); + String authProvider = + ChainAuthorizationProperties.getPluginProvider(pluginName, properties); + if ("ranger".equals(authProvider)) { + RangerAuthorizationProperties.validate(pluginProperties); + } else { + throw new IllegalArgumentException("Unsupported provider: " + authProvider); + } + }); + // Create the plugins + ChainAuthorizationProperties.plugins(properties) + .forEach( + pluginName -> { + String authProvider = + ChainAuthorizationProperties.getPluginProvider(pluginName, properties); + Map pluginConfig = + ChainAuthorizationProperties.fetchAuthPluginProperties(pluginName, properties); + + ArrayList libAndResourcesPaths = Lists.newArrayList(); + BaseAuthorization.buildAuthorizationPkgPath( + ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, authProvider)) + .ifPresent(libAndResourcesPaths::add); + IsolatedClassLoader classLoader = + IsolatedClassLoader.buildClassLoader(libAndResourcesPaths); + try { + BaseAuthorization authorization = + BaseAuthorization.createAuthorization(classLoader, authProvider); + AuthorizationPlugin authorizationPlugin = + authorization.newPlugin(metalake, catalogProvider, pluginConfig); + plugins.add(authorizationPlugin); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @VisibleForTesting + public final List getPlugins() { + return plugins; 
+ } + + @Override + public void close() throws IOException { + for (AuthorizationPlugin plugin : plugins) { + plugin.close(); + } + } + + @Override + public Boolean onMetadataUpdated(MetadataObjectChange... changes) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onMetadataUpdated(changes); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleCreated(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleCreated(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleAcquired(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleAcquired(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleDeleted(Role role) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleDeleted(role); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRoleUpdated(Role role, RoleChange... 
changes) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRoleUpdated(role, changes); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGrantedRolesToUser(List roles, User user) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGrantedRolesToUser(roles, user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRevokedRolesFromUser(List roles, User user) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRevokedRolesFromUser(roles, user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGrantedRolesToGroup(List roles, Group group) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGrantedRolesToGroup(roles, group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onRevokedRolesFromGroup(List roles, Group group) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onRevokedRolesFromGroup(roles, group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserAdded(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onUserAdded(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserRemoved(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = 
plugin.onUserRemoved(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onUserAcquired(User user) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onUserAcquired(user); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupAdded(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupAdded(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupRemoved(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupRemoved(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onGroupAcquired(Group group) throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onGroupAcquired(group); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } + + @Override + public Boolean onOwnerSet(MetadataObject metadataObject, Owner preOwner, Owner newOwner) + throws AuthorizationPluginException { + for (AuthorizationPlugin plugin : plugins) { + Boolean result = plugin.onOwnerSet(metadataObject, preOwner, newOwner); + if (Boolean.FALSE.equals(result)) { + return Boolean.FALSE; + } + } + return Boolean.TRUE; + } +} diff --git a/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider b/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider new file mode 100644 index 00000000000..c4b35cb24df --- 
/dev/null +++ b/authorizations/authorization-chain/src/main/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +org.apache.gravitino.authorization.chain.ChainAuthorization \ No newline at end of file diff --git a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java new file mode 100644 index 00000000000..d278120e330 --- /dev/null +++ b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainAuthorizationIT.java @@ -0,0 +1,369 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.gravitino.authorization.chain.integration.test; + +import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName; +import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.Configs; +import org.apache.gravitino.auth.AuthConstants; +import org.apache.gravitino.auth.AuthenticatorType; +import org.apache.gravitino.authorization.ChainAuthorizationProperties; +import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.SecurableObject; +import org.apache.gravitino.authorization.SecurableObjects; +import org.apache.gravitino.authorization.ranger.integration.test.RangerBaseE2EIT; +import org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv; +import org.apache.gravitino.catalog.hive.HiveConstants; +import org.apache.gravitino.exceptions.UserAlreadyExistsException; +import org.apache.gravitino.integration.test.container.HiveContainer; +import org.apache.gravitino.integration.test.container.RangerContainer; +import org.apache.gravitino.integration.test.util.BaseIT; +import org.apache.gravitino.integration.test.util.GravitinoITUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import 
org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.kyuubi.plugin.spark.authz.AccessControlException; +import org.apache.spark.sql.SparkSession; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestChainAuthorizationIT extends RangerBaseE2EIT { + private static final Logger LOG = LoggerFactory.getLogger(TestChainAuthorizationIT.class); + private static String DEFAULT_FS; + private FileSystem fileSystem; + + @BeforeAll + public void startIntegrationTest() throws Exception { + metalakeName = GravitinoITUtils.genRandomName("metalake").toLowerCase(); + // Enable Gravitino Authorization mode + Map configs = Maps.newHashMap(); + configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true)); + configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME); + configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase()); + configs.put("SimpleAuthUserName", AuthConstants.ANONYMOUS_USER); + registerCustomConfigs(configs); + + super.startIntegrationTest(); + RangerITEnv.init(RangerBaseE2EIT.metalakeName, false); + RangerITEnv.startHiveRangerContainer(); + + HIVE_METASTORE_URIS = + String.format( + "thrift://%s:%d", + containerSuite.getHiveRangerContainer().getContainerIpAddress(), + HiveContainer.HIVE_METASTORE_PORT); + + generateRangerSparkSecurityXML("authorization-chain"); + + DEFAULT_FS = + String.format( + "hdfs://%s:%d/user/hive/warehouse", + containerSuite.getHiveRangerContainer().getContainerIpAddress(), + HiveContainer.HDFS_DEFAULTFS_PORT); + BaseIT.runInEnv( + "HADOOP_USER_NAME", + AuthConstants.ANONYMOUS_USER, + () -> { + sparkSession = + SparkSession.builder() + .master("local[1]") + .appName("Ranger Hive E2E integration test") + .config("hive.metastore.uris", HIVE_METASTORE_URIS) + 
.config("spark.sql.warehouse.dir", DEFAULT_FS) + .config("spark.sql.storeAssignmentPolicy", "LEGACY") + .config("mapreduce.input.fileinputformat.input.dir.recursive", "true") + .config( + "spark.sql.extensions", + "org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension") + .enableHiveSupport() + .getOrCreate(); + sparkSession.sql(SQL_SHOW_DATABASES); // must be called to activate the Spark session + }); + createMetalake(); + createCatalog(); + + Configuration conf = new Configuration(); + conf.set("fs.defaultFS", DEFAULT_FS); + fileSystem = FileSystem.get(conf); + + RangerITEnv.cleanup(); + try { + metalake.addUser(System.getenv(HADOOP_USER_NAME)); + } catch (UserAlreadyExistsException e) { + LOG.error("Failed to add user: {}", System.getenv(HADOOP_USER_NAME), e); + } + } + + @AfterAll + public void stop() throws IOException { + if (client != null) { + Arrays.stream(catalog.asSchemas().listSchemas()) + .filter(schema -> !schema.equals("default")) + .forEach( + (schema -> { + catalog.asSchemas().dropSchema(schema, false); + })); + Arrays.stream(metalake.listCatalogs()) + .forEach((catalogName -> metalake.dropCatalog(catalogName, true))); + client.disableMetalake(metalakeName); + client.dropMetalake(metalakeName); + } + if (fileSystem != null) { + fileSystem.close(); + } + try { + closer.close(); + } catch (Exception e) { + LOG.error("Failed to close CloseableGroup", e); + } + client = null; + RangerITEnv.cleanup(); + } + + private String storageLocation(String dirName) { + return DEFAULT_FS + "/" + dirName; + } + + @Test + public void testCreateSchema() throws IOException { + // Choose a catalog + useCatalog(); + + // First, fail to create the schema + Exception accessControlException = + Assertions.assertThrows(Exception.class, () -> sparkSession.sql(SQL_CREATE_SCHEMA)); + Assertions.assertTrue( + accessControlException + .getMessage() + .contains( + String.format( + "Permission denied: user [%s] does not have [create] privilege", + 
AuthConstants.ANONYMOUS_USER)) + || accessControlException + .getMessage() + .contains( + String.format( + "Permission denied: user=%s, access=WRITE", AuthConstants.ANONYMOUS_USER))); + Path schemaPath = new Path(storageLocation(schemaName + ".db")); + Assertions.assertFalse(fileSystem.exists(schemaPath)); + FileStatus fileStatus = fileSystem.getFileStatus(new Path(DEFAULT_FS)); + Assertions.assertEquals(System.getenv(HADOOP_USER_NAME), fileStatus.getOwner()); + + // Second, grant the `CREATE_SCHEMA` role + String roleName = currentFunName(); + SecurableObject securableObject = + SecurableObjects.ofCatalog( + catalogName, Lists.newArrayList(Privileges.CreateSchema.allow())); + metalake.createRole(roleName, Collections.emptyMap(), Lists.newArrayList(securableObject)); + metalake.grantRolesToUser(Lists.newArrayList(roleName), AuthConstants.ANONYMOUS_USER); + waitForUpdatingPolicies(); + + // Third, succeed to create the schema + sparkSession.sql(SQL_CREATE_SCHEMA); + Assertions.assertTrue(fileSystem.exists(schemaPath)); + FileStatus fsSchema = fileSystem.getFileStatus(schemaPath); + Assertions.assertEquals(AuthConstants.ANONYMOUS_USER, fsSchema.getOwner()); + + // Fourth, fail to create the table + Assertions.assertThrows(AccessControlException.class, () -> sparkSession.sql(SQL_CREATE_TABLE)); + + // Clean up + catalog.asSchemas().dropSchema(schemaName, false); + metalake.deleteRole(roleName); + waitForUpdatingPolicies(); + + Exception accessControlException2 = + Assertions.assertThrows(Exception.class, () -> sparkSession.sql(SQL_CREATE_SCHEMA)); + Assertions.assertTrue( + accessControlException2 + .getMessage() + .contains( + String.format( + "Permission denied: user [%s] does not have [create] privilege", + AuthConstants.ANONYMOUS_USER)) + || accessControlException2 + .getMessage() + .contains( + String.format( + "Permission denied: user=%s, access=WRITE", AuthConstants.ANONYMOUS_USER))); + } + + @Override + public void createCatalog() { + Map catalogConf = new 
HashMap<>(); + catalogConf.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS); + catalogConf.put(IMPERSONATION_ENABLE, "true"); + catalogConf.put(Catalog.AUTHORIZATION_PROVIDER, "chain"); + catalogConf.put(ChainAuthorizationProperties.CHAIN_PLUGINS_PROPERTIES_KEY, "hive1,hdfs1"); + catalogConf.put("authorization.chain.hive1.provider", "ranger"); + catalogConf.put("authorization.chain.hive1.ranger.auth.type", RangerContainer.authType); + catalogConf.put("authorization.chain.hive1.ranger.admin.url", RangerITEnv.RANGER_ADMIN_URL); + catalogConf.put("authorization.chain.hive1.ranger.username", RangerContainer.rangerUserName); + catalogConf.put("authorization.chain.hive1.ranger.password", RangerContainer.rangerPassword); + catalogConf.put("authorization.chain.hive1.ranger.service.type", "HadoopSQL"); + catalogConf.put( + "authorization.chain.hive1.ranger.service.name", RangerITEnv.RANGER_HIVE_REPO_NAME); + catalogConf.put("authorization.chain.hdfs1.provider", "ranger"); + catalogConf.put("authorization.chain.hdfs1.ranger.auth.type", RangerContainer.authType); + catalogConf.put("authorization.chain.hdfs1.ranger.admin.url", RangerITEnv.RANGER_ADMIN_URL); + catalogConf.put("authorization.chain.hdfs1.ranger.username", RangerContainer.rangerUserName); + catalogConf.put("authorization.chain.hdfs1.ranger.password", RangerContainer.rangerPassword); + catalogConf.put("authorization.chain.hdfs1.ranger.service.type", "HDFS"); + catalogConf.put( + "authorization.chain.hdfs1.ranger.service.name", RangerITEnv.RANGER_HDFS_REPO_NAME); + + metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, "hive", "comment", catalogConf); + catalog = metalake.loadCatalog(catalogName); + LOG.info("Catalog created: {}", catalog); + } + + @Test + void testCreateTable() throws InterruptedException { + // TODO + } + + @Test + void testReadWriteTableWithMetalakeLevelRole() throws InterruptedException { + // TODO + } + + @Test + void testReadWriteTableWithTableLevelRole() throws 
InterruptedException { + // TODO + } + + @Test + void testReadOnlyTable() throws InterruptedException { + // TODO + } + + @Test + void testWriteOnlyTable() throws InterruptedException { + // TODO + } + + @Test + void testCreateAllPrivilegesRole() throws InterruptedException { + // TODO + } + + @Test + void testDeleteAndRecreateRole() throws InterruptedException { + // TODO + } + + @Test + void testDeleteAndRecreateMetadataObject() throws InterruptedException { + // TODO + } + + @Test + void testRenameMetadataObject() throws InterruptedException { + // TODO + } + + @Test + void testRenameMetadataObjectPrivilege() throws InterruptedException { + // TODO + } + + @Test + void testChangeOwner() throws InterruptedException { + // TODO + } + + @Test + void testAllowUseSchemaPrivilege() throws InterruptedException { + // TODO + } + + @Test + void testDenyPrivileges() throws InterruptedException { + // TODO + } + + @Test + void testGrantPrivilegesForMetalake() throws InterruptedException { + // TODO + } + + @Override + protected void checkTableAllPrivilegesExceptForCreating() { + // TODO + } + + @Override + protected void checkUpdateSQLWithReadWritePrivileges() { + // TODO + } + + @Override + protected void checkUpdateSQLWithReadPrivileges() { + // TODO + } + + @Override + protected void checkUpdateSQLWithWritePrivileges() { + // TODO + } + + @Override + protected void checkDeleteSQLWithReadWritePrivileges() { + // TODO + } + + @Override + protected void checkDeleteSQLWithReadPrivileges() { + // TODO + } + + @Override + protected void checkDeleteSQLWithWritePrivileges() { + // TODO + } + + @Override + protected void useCatalog() { + // TODO + } + + @Override + protected void checkWithoutPrivileges() { + // TODO + } + + @Override + protected void testAlterTable() { + // TODO + } +} diff --git a/authorizations/authorization-chain/src/test/resources/log4j2.properties b/authorizations/authorization-chain/src/test/resources/log4j2.properties new file mode 100644 index 
00000000000..2a46c57ec2f --- /dev/null +++ b/authorizations/authorization-chain/src/test/resources/log4j2.properties @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Set to debug or trace if log4j initialization is failing +status = info + +# Name of the configuration +name = ConsoleLogConfig + +# Console appender configuration +appender.console.type = Console +appender.console.name = consoleLogger +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n + +# Log files location +property.logPath = ${sys:gravitino.log.path:-build/authorization-chain-integration-test.log} + +# File appender configuration +appender.file.type = File +appender.file.name = fileLogger +appender.file.fileName = ${logPath} +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Root logger level +rootLogger.level = info + +# Root logger referring to console and file appenders +rootLogger.appenderRef.stdout.ref = consoleLogger +rootLogger.appenderRef.file.ref = fileLogger + +# File appender configuration for testcontainers +appender.testcontainersFile.type = File 
+appender.testcontainersFile.name = testcontainersLogger +appender.testcontainersFile.fileName = build/testcontainers.log +appender.testcontainersFile.layout.type = PatternLayout +appender.testcontainersFile.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Logger for testcontainers +logger.testcontainers.name = org.testcontainers +logger.testcontainers.level = debug +logger.testcontainers.additivity = false +logger.testcontainers.appenderRef.file.ref = testcontainersLogger + +logger.tc.name = tc +logger.tc.level = debug +logger.tc.additivity = false +logger.tc.appenderRef.file.ref = testcontainersLogger + +logger.docker.name = com.github.dockerjava +logger.docker.level = warn +logger.docker.additivity = false +logger.docker.appenderRef.file.ref = testcontainersLogger + +logger.http.name = com.github.dockerjava.zerodep.shaded.org.apache.hc.client5.http.wire +logger.http.level = off diff --git a/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template b/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template new file mode 100644 index 00000000000..eb7f2b5e811 --- /dev/null +++ b/authorizations/authorization-chain/src/test/resources/ranger-spark-security.xml.template @@ -0,0 +1,45 @@ + + + + ranger.plugin.spark.policy.rest.url + __REPLACE__RANGER_ADMIN_URL + + + + ranger.plugin.spark.service.name + __REPLACE__RANGER_HIVE_REPO_NAME + + + + ranger.plugin.spark.policy.cache.dir + /tmp/policycache + + + + ranger.plugin.spark.policy.pollIntervalMs + 500 + + + + ranger.plugin.spark.policy.source.impl + org.apache.ranger.admin.client.RangerAdminRESTClient + + + \ No newline at end of file diff --git a/authorizations/authorization-common/build.gradle.kts b/authorizations/authorization-common/build.gradle.kts new file mode 100644 index 00000000000..f32e895cd7b --- /dev/null +++ b/authorizations/authorization-common/build.gradle.kts @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +description = "authorization-chain" + +plugins { + `maven-publish` + id("java") + id("idea") +} + +dependencies { + implementation(project(":api")) { + exclude(group = "*") + } + implementation(project(":core")) { + exclude(group = "*") + } + implementation(project(":common")) { + exclude(group = "*") + } + implementation(libs.bundles.log4j) + implementation(libs.commons.lang3) + implementation(libs.guava) + implementation(libs.javax.jaxb.api) { + exclude("*") + } + implementation(libs.javax.ws.rs.api) + implementation(libs.jettison) + compileOnly(libs.lombok) + implementation(libs.rome) + + testImplementation(libs.junit.jupiter.api) + testImplementation(libs.mockito.core) + testImplementation(libs.testcontainers) + testRuntimeOnly(libs.junit.jupiter.engine) +} + +tasks { + val runtimeJars by registering(Copy::class) { + from(configurations.runtimeClasspath) + into("build/libs") + } + + jar { + dependsOn(runtimeJars) + } +} + +tasks.test { + val skipITs = project.hasProperty("skipITs") + if (skipITs) { + // Exclude integration tests + exclude("**/integration/test/**") + } else { + dependsOn(tasks.jar) + } +} diff --git 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/ChainAuthorizationProperties.java similarity index 91% rename from authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java rename to authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/ChainAuthorizationProperties.java index edaa375747a..56fd04f143b 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java +++ b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/ChainAuthorizationProperties.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.gravitino.authorization.ranger; +package org.apache.gravitino.authorization; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -49,14 +49,24 @@ * "authorization.chain.hdfs1.ranger.password" = "admin";
*/ public class ChainAuthorizationProperties { - public static final String PLUGINS_SPLITTER = ","; + private static final String PLUGINS_SPLITTER = ","; /** Chain authorization plugin names */ public static final String CHAIN_PLUGINS_PROPERTIES_KEY = "authorization.chain.plugins"; /** Chain authorization plugin provider */ public static final String CHAIN_PROVIDER = "authorization.chain.*.provider"; - static Map fetchAuthPluginProperties( + public static final String PREFIX = "authorization.chain"; + + public static String getPluginProvider(String pluginName, Map properties) { + return properties.get(PREFIX + "." + pluginName + ".provider"); + } + + public static List plugins(Map properties) { + return Arrays.asList(properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER)); + } + + public static Map fetchAuthPluginProperties( String pluginName, Map properties) { Preconditions.checkArgument( properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY) @@ -100,6 +110,7 @@ public static void validate(Map properties) { List pluginNames = Arrays.stream(properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER)) .map(String::trim) + .filter(v -> !v.isEmpty()) .collect(Collectors.toList()); Preconditions.checkArgument( !pluginNames.isEmpty(), diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/RangerAuthorizationProperties.java similarity index 98% rename from authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java rename to authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/RangerAuthorizationProperties.java index e7fee3088f6..3f3096a9183 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java +++ 
b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/RangerAuthorizationProperties.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.gravitino.authorization.ranger; +package org.apache.gravitino.authorization; import com.google.common.base.Preconditions; import java.util.Map; diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java b/authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestChainAuthorizationProperties.java similarity index 91% rename from authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java rename to authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestChainAuthorizationProperties.java index 5d19f234093..9ffa075ebf1 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java +++ b/authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestChainAuthorizationProperties.java @@ -16,19 +16,20 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.apache.gravitino.authorization.ranger; +package org.apache.gravitino.authorization; import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; -import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; import com.google.common.collect.Maps; import java.util.HashMap; import java.util.Map; -import org.apache.gravitino.catalog.hive.HiveConstants; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; public class TestChainAuthorizationProperties { + static final String METASTORE_URIS = "metastore.uris"; + public static final String IMPERSONATION_ENABLE = "impersonation-enable"; + @Test void testChainOnePlugin() { Map properties = Maps.newHashMap(); @@ -46,7 +47,7 @@ void testChainOnePlugin() { @Test void testChainTwoPlugins() { Map properties = new HashMap<>(); - properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083"); + properties.put(METASTORE_URIS, "thrift://localhost:9083"); properties.put("gravitino.bypass.hive.metastore.client.capability.check", "true"); properties.put(IMPERSONATION_ENABLE, "true"); properties.put(AUTHORIZATION_PROVIDER, "chain"); @@ -68,6 +69,21 @@ void testChainTwoPlugins() { Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties)); } + @Test + void testWithoutChains() { + Map properties = Maps.newHashMap(); + properties.put("authorization.chain.plugins", ""); + properties.put("authorization.chain.hive1.provider", "ranger"); + properties.put("authorization.chain.hive1.ranger.auth.type", "simple"); + properties.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080"); + properties.put("authorization.chain.hive1.ranger.username", "admin"); + properties.put("authorization.chain.hive1.ranger.password", "admin"); + properties.put("authorization.chain.hive1.ranger.service.type", "hive"); + properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev"); + Assertions.assertThrows( + 
IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties)); + } + @Test void testPluginsHasSpace() { Map properties = Maps.newHashMap(); @@ -180,7 +196,7 @@ void testDuplicationPluginName() { @Test void testFetchRangerPrpoerties() { Map properties = new HashMap<>(); - properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083"); + properties.put(METASTORE_URIS, "thrift://localhost:9083"); properties.put("gravitino.bypass.hive.metastore.client.capability.check", "true"); properties.put(IMPERSONATION_ENABLE, "true"); properties.put(AUTHORIZATION_PROVIDER, "chain"); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java b/authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestRangerAuthorizationProperties.java similarity index 98% rename from authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java rename to authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestRangerAuthorizationProperties.java index a90b164a21f..aeb3683c574 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java +++ b/authorizations/authorization-common/src/test/java/org/apache/gravitino/authorization/TestRangerAuthorizationProperties.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.apache.gravitino.authorization.ranger; +package org.apache.gravitino.authorization; import com.google.common.collect.Maps; import java.util.Map; diff --git a/authorizations/authorization-ranger/build.gradle.kts b/authorizations/authorization-ranger/build.gradle.kts index a335e492b31..d21766e319b 100644 --- a/authorizations/authorization-ranger/build.gradle.kts +++ b/authorizations/authorization-ranger/build.gradle.kts @@ -38,7 +38,12 @@ dependencies { implementation(project(":core")) { exclude(group = "*") } - + implementation(project(":catalogs:catalog-common")) { + exclude(group = "*") + } + implementation(project(":authorizations:authorization-common")) { + exclude(group = "*") + } implementation(libs.bundles.log4j) implementation(libs.commons.lang3) implementation(libs.guava) @@ -50,7 +55,6 @@ dependencies { compileOnly(libs.lombok) implementation(libs.mail) implementation(libs.ranger.intg) { - exclude("org.apache.hadoop", "hadoop-common") exclude("org.apache.hive", "hive-storage-api") exclude("org.apache.lucene") exclude("org.apache.solr") @@ -70,7 +74,7 @@ dependencies { testImplementation(project(":common")) testImplementation(project(":clients:client-java")) testImplementation(project(":server")) - testImplementation(project(":catalogs:catalog-common")) + testImplementation(project(":integration-test-common", "testArtifacts")) testImplementation(libs.junit.jupiter.api) testImplementation(libs.mockito.core) @@ -143,3 +147,16 @@ tasks.test { dependsOn(tasks.jar) } } + +val testJar by tasks.registering(Jar::class) { + archiveClassifier.set("tests") + from(sourceSets["test"].output) +} + +configurations { + create("testArtifacts") +} + +artifacts { + add("testArtifacts", testJar) +} diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java index 
6aae714a359..f1f4934d8f2 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java @@ -18,10 +18,9 @@ */ package org.apache.gravitino.authorization.ranger; -import static org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties.RANGER_SERVICE_TYPE; - import com.google.common.base.Preconditions; import java.util.Map; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; import org.apache.gravitino.connector.authorization.BaseAuthorization; @@ -36,9 +35,10 @@ public String shortName() { public AuthorizationPlugin newPlugin( String metalake, String catalogProvider, Map properties) { Preconditions.checkArgument( - properties.containsKey(RANGER_SERVICE_TYPE), - String.format("%s is required", RANGER_SERVICE_TYPE)); - String serviceType = properties.get(RANGER_SERVICE_TYPE).toUpperCase(); + properties.containsKey(RangerAuthorizationProperties.RANGER_SERVICE_TYPE), + String.format("%s is required", RangerAuthorizationProperties.RANGER_SERVICE_TYPE)); + String serviceType = + properties.get(RangerAuthorizationProperties.RANGER_SERVICE_TYPE).toUpperCase(); switch (serviceType) { case "HADOOPSQL": return new RangerAuthorizationHadoopSQLPlugin(metalake, properties); diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java index 9afa77880e9..a97c3cd94d1 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java +++ 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java @@ -18,6 +18,7 @@ */ package org.apache.gravitino.authorization.ranger; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,9 +31,12 @@ import java.util.Objects; import java.util.Set; import java.util.regex.Pattern; +import org.apache.gravitino.Catalog; import org.apache.gravitino.GravitinoEnv; import org.apache.gravitino.MetadataObject; import org.apache.gravitino.NameIdentifier; +import org.apache.gravitino.Namespace; +import org.apache.gravitino.Schema; import org.apache.gravitino.authorization.AuthorizationMetadataObject; import org.apache.gravitino.authorization.AuthorizationPrivilege; import org.apache.gravitino.authorization.AuthorizationSecurableObject; @@ -41,15 +45,13 @@ import org.apache.gravitino.authorization.SecurableObjects; import org.apache.gravitino.authorization.ranger.reference.RangerDefines; import org.apache.gravitino.catalog.FilesetDispatcher; +import org.apache.gravitino.catalog.hive.HiveConstants; import org.apache.gravitino.exceptions.AuthorizationPluginException; +import org.apache.gravitino.exceptions.NoSuchEntityException; import org.apache.gravitino.file.Fileset; import org.apache.ranger.plugin.model.RangerPolicy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class RangerAuthorizationHDFSPlugin extends RangerAuthorizationPlugin { - private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationHDFSPlugin.class); - private static final Pattern pattern = Pattern.compile("^hdfs://[^/]*"); public RangerAuthorizationHDFSPlugin(String metalake, Map config) { @@ -59,6 +61,38 @@ public RangerAuthorizationHDFSPlugin(String metalake, Map config @Override public Map> privilegesMappingRule() { return ImmutableMap.of( + 
Privilege.Name.USE_CATALOG, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.CREATE_CATALOG, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.WRITE, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.USE_SCHEMA, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.CREATE_SCHEMA, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.WRITE, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.CREATE_TABLE, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.WRITE, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.MODIFY_TABLE, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.WRITE, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), + Privilege.Name.SELECT_TABLE, + ImmutableSet.of( + RangerPrivileges.RangerHdfsPrivilege.READ, + RangerPrivileges.RangerHdfsPrivilege.EXECUTE), Privilege.Name.READ_FILESET, ImmutableSet.of( RangerPrivileges.RangerHdfsPrivilege.READ, @@ -99,9 +133,9 @@ public AuthorizationSecurableObject generateAuthorizationSecurableObject( AuthorizationMetadataObject.Type type, Set privileges) { AuthorizationMetadataObject authMetadataObject = - new RangerPathBaseMetadataObject(AuthorizationMetadataObject.getLastName(names), type); + new RangerHDFSMetadataObject(AuthorizationMetadataObject.getLastName(names), type); authMetadataObject.validateAuthorizationMetadataObject(); - return new RangerPathBaseSecurableObject( + return new RangerHDFSSecurableObject( authMetadataObject.name(), authMetadataObject.type(), privileges); } @@ -137,10 +171,52 @@ public List translatePrivilege(SecurableObject sec .forEach( rangerPrivilege -> rangerPrivileges.add( - new 
RangerPrivileges.RangerHivePrivilegeImpl( + new RangerPrivileges.RangerHDFSPrivilegeImpl( rangerPrivilege, gravitinoPrivilege.condition()))); - switch (gravitinoPrivilege.name()) { + case USE_CATALOG: + case CREATE_CATALOG: + // When HDFS is used as the Hive storage layer, Hive does not support the + // `USE_CATALOG` and `CREATE_CATALOG` privileges. So, we ignore these + // in the RangerAuthorizationHDFSPlugin. + break; + case USE_SCHEMA: + break; + case CREATE_SCHEMA: + switch (securableObject.type()) { + case METALAKE: + case CATALOG: + { + String locationPath = getLocationPath(securableObject); + if (locationPath != null && !locationPath.isEmpty()) { + RangerHDFSMetadataObject rangerHDFSMetadataObject = + new RangerHDFSMetadataObject( + locationPath, RangerHDFSMetadataObject.Type.PATH); + rangerSecurableObjects.add( + generateAuthorizationSecurableObject( + rangerHDFSMetadataObject.names(), + RangerHDFSMetadataObject.Type.PATH, + rangerPrivileges)); + } + } + break; + case FILESET: + rangerSecurableObjects.add( + generateAuthorizationSecurableObject( + translateMetadataObject(securableObject).names(), + RangerHDFSMetadataObject.Type.PATH, + rangerPrivileges)); + break; + default: + throw new AuthorizationPluginException( + "The privilege %s is not supported for the securable object: %s", + gravitinoPrivilege.name(), securableObject.type()); + } + break; + case SELECT_TABLE: + case CREATE_TABLE: + case MODIFY_TABLE: + break; case CREATE_FILESET: // Ignore the Gravitino privilege `CREATE_FILESET` in the // RangerAuthorizationHDFSPlugin @@ -156,7 +232,7 @@ public List translatePrivilege(SecurableObject sec rangerSecurableObjects.add( generateAuthorizationSecurableObject( translateMetadataObject(securableObject).names(), - RangerPathBaseMetadataObject.Type.PATH, + RangerHDFSMetadataObject.Type.PATH, rangerPrivileges)); break; default: @@ -166,10 +242,9 @@ public List translatePrivilege(SecurableObject sec } break; default: - LOG.warn( - 
"RangerAuthorizationHDFSPlugin -> privilege {} is not supported for the securable object: {}", - gravitinoPrivilege.name(), - securableObject.type()); + throw new AuthorizationPluginException( + "The privilege %s is not supported for the securable object: %s", + gravitinoPrivilege.name(), securableObject.type()); } }); @@ -183,12 +258,12 @@ public List translateOwner(MetadataObject gravitin case METALAKE: case CATALOG: case SCHEMA: - return rangerSecurableObjects; + break; case FILESET: rangerSecurableObjects.add( generateAuthorizationSecurableObject( translateMetadataObject(gravitinoMetadataObject).names(), - RangerPathBaseMetadataObject.Type.PATH, + RangerHDFSMetadataObject.Type.PATH, ownerMappingRule())); break; default: @@ -212,27 +287,77 @@ public AuthorizationMetadataObject translateMetadataObject(MetadataObject metada Preconditions.checkArgument( nsMetadataObject.size() > 0, "The metadata object must have at least one name."); - if (metadataObject.type() == MetadataObject.Type.FILESET) { - RangerPathBaseMetadataObject rangerHDFSMetadataObject = - new RangerPathBaseMetadataObject( - getFileSetPath(metadataObject), RangerPathBaseMetadataObject.Type.PATH); - rangerHDFSMetadataObject.validateAuthorizationMetadataObject(); - return rangerHDFSMetadataObject; - } else { - return new RangerPathBaseMetadataObject("", RangerPathBaseMetadataObject.Type.PATH); + RangerHDFSMetadataObject rangerHDFSMetadataObject; + switch (metadataObject.type()) { + case METALAKE: + case CATALOG: + rangerHDFSMetadataObject = + new RangerHDFSMetadataObject("", RangerHDFSMetadataObject.Type.PATH); + break; + case SCHEMA: + rangerHDFSMetadataObject = + new RangerHDFSMetadataObject( + metadataObject.fullName(), RangerHDFSMetadataObject.Type.PATH); + break; + case FILESET: + rangerHDFSMetadataObject = + new RangerHDFSMetadataObject( + getLocationPath(metadataObject), RangerHDFSMetadataObject.Type.PATH); + break; + default: + throw new AuthorizationPluginException( + "The metadata object type 
%s is not supported in the RangerAuthorizationHDFSPlugin", + metadataObject.type()); } + rangerHDFSMetadataObject.validateAuthorizationMetadataObject(); + return rangerHDFSMetadataObject; } - public String getFileSetPath(MetadataObject metadataObject) { - FilesetDispatcher filesetDispatcher = GravitinoEnv.getInstance().filesetDispatcher(); - NameIdentifier identifier = - NameIdentifier.parse(String.format("%s.%s", metalake, metadataObject.fullName())); - Fileset fileset = filesetDispatcher.loadFileset(identifier); - Preconditions.checkArgument( - fileset != null, String.format("Fileset %s is not found", identifier)); - String filesetLocation = fileset.storageLocation(); - Preconditions.checkArgument( - filesetLocation != null, String.format("Fileset %s location is not found", identifier)); - return pattern.matcher(filesetLocation).replaceAll(""); + private NameIdentifier getObjectNameIdentifier(MetadataObject metadataObject) { + return NameIdentifier.parse(String.format("%s.%s", metalake, metadataObject.fullName())); + } + + @VisibleForTesting + public String getLocationPath(MetadataObject metadataObject) throws NoSuchEntityException { + String locationPath = null; + switch (metadataObject.type()) { + case METALAKE: + case SCHEMA: + case TABLE: + break; + case CATALOG: + { + Namespace nsMetadataObj = Namespace.fromString(metadataObject.fullName()); + NameIdentifier ident = NameIdentifier.of(metalake, nsMetadataObj.level(0)); + Catalog catalog = GravitinoEnv.getInstance().catalogDispatcher().loadCatalog(ident); + if (catalog.provider().equals("hive")) { + Schema schema = + GravitinoEnv.getInstance() + .schemaDispatcher() + .loadSchema( + NameIdentifier.of( + metalake, nsMetadataObj.level(0), "default" /*Hive default schema*/)); + String defaultSchemaLocation = schema.properties().get(HiveConstants.LOCATION); + locationPath = pattern.matcher(defaultSchemaLocation).replaceAll(""); + } + } + break; + case FILESET: + FilesetDispatcher filesetDispatcher = 
GravitinoEnv.getInstance().filesetDispatcher(); + NameIdentifier identifier = getObjectNameIdentifier(metadataObject); + Fileset fileset = filesetDispatcher.loadFileset(identifier); + Preconditions.checkArgument( + fileset != null, String.format("Fileset %s is not found", identifier)); + String filesetLocation = fileset.storageLocation(); + Preconditions.checkArgument( + filesetLocation != null, String.format("Fileset %s location is not found", identifier)); + locationPath = pattern.matcher(filesetLocation).replaceAll(""); + break; + default: + throw new AuthorizationPluginException( + "The metadata object type %s is not supported in the RangerAuthorizationHDFSPlugin", + metadataObject.type()); + } + return locationPath; } } diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java index b8e078d086e..aab19d31f36 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java @@ -154,25 +154,25 @@ public Set allowMetadataObjectTypesRule() { /** Translate the Gravitino securable object to the Ranger owner securable object. 
*/ @Override public List translateOwner(MetadataObject gravitinoMetadataObject) { - List AuthorizationSecurableObjects = new ArrayList<>(); + List rangerSecurableObjects = new ArrayList<>(); switch (gravitinoMetadataObject.type()) { case METALAKE: case CATALOG: // Add `*` for the SCHEMA permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.SCHEMA, ownerMappingRule())); // Add `*.*` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.TABLE, ownerMappingRule())); // Add `*.*.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( RangerHelper.RESOURCE_ALL, @@ -183,20 +183,20 @@ public List translateOwner(MetadataObject gravitin break; case SCHEMA: // Add `{schema}` for the SCHEMA permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(gravitinoMetadataObject.name() /*Schema name*/), RangerHadoopSQLMetadataObject.Type.SCHEMA, ownerMappingRule())); // Add `{schema}.*` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( gravitinoMetadataObject.name() /*Schema name*/, RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.TABLE, ownerMappingRule())); // Add `{schema}.*.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( gravitinoMetadataObject.name() /*Schema name*/, @@ -207,13 +207,13 @@ public List translateOwner(MetadataObject gravitin break; case TABLE: // Add 
`{schema}.{table}` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( translateMetadataObject(gravitinoMetadataObject).names(), RangerHadoopSQLMetadataObject.Type.TABLE, ownerMappingRule())); // Add `{schema}.{table}.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( Stream.concat( translateMetadataObject(gravitinoMetadataObject).names().stream(), @@ -228,13 +228,13 @@ public List translateOwner(MetadataObject gravitin gravitinoMetadataObject.type()); } - return AuthorizationSecurableObjects; + return rangerSecurableObjects; } /** Translate the Gravitino securable object to the Ranger securable object. */ @Override public List translatePrivilege(SecurableObject securableObject) { - List AuthorizationSecurableObjects = new ArrayList<>(); + List rangerSecurableObjects = new ArrayList<>(); securableObject.privileges().stream() .filter(Objects::nonNull) @@ -262,7 +262,7 @@ public List translatePrivilege(SecurableObject sec case METALAKE: case CATALOG: // Add Ranger privilege(`SELECT`) to SCHEMA(`*`) - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.SCHEMA, @@ -279,7 +279,7 @@ public List translatePrivilege(SecurableObject sec case METALAKE: case CATALOG: // Add Ranger privilege(`CREATE`) to SCHEMA(`*`) - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.SCHEMA, @@ -296,7 +296,7 @@ public List translatePrivilege(SecurableObject sec case METALAKE: case CATALOG: // Add Ranger privilege(`SELECT`) to SCHEMA(`*`) - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( 
ImmutableList.of(RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.SCHEMA, @@ -304,7 +304,7 @@ public List translatePrivilege(SecurableObject sec break; case SCHEMA: // Add Ranger privilege(`SELECT`) to SCHEMA(`{schema}`) - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of(securableObject.name() /*Schema name*/), RangerHadoopSQLMetadataObject.Type.SCHEMA, @@ -323,14 +323,14 @@ public List translatePrivilege(SecurableObject sec case METALAKE: case CATALOG: // Add `*.*` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL), RangerHadoopSQLMetadataObject.Type.TABLE, rangerPrivileges)); // Add `*.*.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( RangerHelper.RESOURCE_ALL, @@ -341,7 +341,7 @@ public List translatePrivilege(SecurableObject sec break; case SCHEMA: // Add `{schema}.*` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( securableObject.name() /*Schema name*/, @@ -349,7 +349,7 @@ public List translatePrivilege(SecurableObject sec RangerHadoopSQLMetadataObject.Type.TABLE, rangerPrivileges)); // Add `{schema}.*.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( ImmutableList.of( securableObject.name() /*Schema name*/, @@ -365,13 +365,13 @@ public List translatePrivilege(SecurableObject sec gravitinoPrivilege.name(), securableObject.type()); } else { // Add `{schema}.{table}` for the TABLE permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( translateMetadataObject(securableObject).names(), 
RangerHadoopSQLMetadataObject.Type.TABLE, rangerPrivileges)); // Add `{schema}.{table}.*` for the COLUMN permission - AuthorizationSecurableObjects.add( + rangerSecurableObjects.add( generateAuthorizationSecurableObject( Stream.concat( translateMetadataObject(securableObject).names().stream(), @@ -396,7 +396,7 @@ public List translatePrivilege(SecurableObject sec } }); - return AuthorizationSecurableObjects; + return rangerSecurableObjects; } /** diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java index 7a91ad54bf0..0be23eebff7 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java @@ -44,6 +44,7 @@ import org.apache.gravitino.authorization.MetadataObjectChange; import org.apache.gravitino.authorization.Owner; import org.apache.gravitino.authorization.Privilege; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.authorization.Role; import org.apache.gravitino.authorization.RoleChange; import org.apache.gravitino.authorization.SecurableObject; diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseMetadataObject.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSMetadataObject.java similarity index 92% rename from authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseMetadataObject.java rename to authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSMetadataObject.java index 
77523464162..2bf842dc9ce 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseMetadataObject.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSMetadataObject.java @@ -25,7 +25,7 @@ import org.apache.gravitino.MetadataObject; import org.apache.gravitino.authorization.AuthorizationMetadataObject; -public class RangerPathBaseMetadataObject implements AuthorizationMetadataObject { +public class RangerHDFSMetadataObject implements AuthorizationMetadataObject { /** * The type of object in the Ranger system. Every type will map one kind of the entity of the * Gravitino type system. @@ -59,7 +59,7 @@ public static RangerHadoopSQLMetadataObject.Type fromMetadataType( private final AuthorizationMetadataObject.Type type; - public RangerPathBaseMetadataObject(String path, AuthorizationMetadataObject.Type type) { + public RangerHDFSMetadataObject(String path, AuthorizationMetadataObject.Type type) { this.path = path; this.type = type; } @@ -97,7 +97,7 @@ public void validateAuthorizationMetadataObject() throws IllegalArgumentExceptio type != null, "Cannot create a Ranger metadata object with no type"); Preconditions.checkArgument( - type == RangerPathBaseMetadataObject.Type.PATH, "it must be the PATH type"); + type == RangerHDFSMetadataObject.Type.PATH, "it must be the PATH type"); for (String name : names) { Preconditions.checkArgument(name != null, "Cannot create a metadata object with null name"); diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseSecurableObject.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSSecurableObject.java similarity index 92% rename from authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseSecurableObject.java rename to 
authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSSecurableObject.java index bd2c73fdaef..df1bac73545 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPathBaseSecurableObject.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHDFSSecurableObject.java @@ -25,12 +25,12 @@ import org.apache.gravitino.authorization.AuthorizationPrivilege; import org.apache.gravitino.authorization.AuthorizationSecurableObject; -public class RangerPathBaseSecurableObject extends RangerPathBaseMetadataObject +public class RangerHDFSSecurableObject extends RangerHDFSMetadataObject implements AuthorizationSecurableObject { private final List privileges; - public RangerPathBaseSecurableObject( + public RangerHDFSSecurableObject( String path, AuthorizationMetadataObject.Type type, Set privileges) { super(path, type); this.privileges = ImmutableList.copyOf(privileges); diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java index 4c2b2956c8c..42565d8fd9d 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java @@ -49,8 +49,10 @@ public class RangerHelper { private static final Logger LOG = LoggerFactory.getLogger(RangerHelper.class); public static final String MANAGED_BY_GRAVITINO = "MANAGED_BY_GRAVITINO"; - /** The `*` gives access to all resources */ + /** The `*` gives access to all table resources */ public static final String RESOURCE_ALL = "*"; + /** The `/` gives access to all path resources */ + public static final String RESOURCE_ROOT_PATH = "/"; /** The
owner privileges, the owner can do anything on the metadata object */ private final Set ownerPrivileges; /** The policy search keys */ diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPrivileges.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPrivileges.java index bbae16a6ba2..3d7c35a869d 100644 --- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPrivileges.java +++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerPrivileges.java @@ -116,6 +116,32 @@ public boolean equalsTo(String value) { } } + public static class RangerHDFSPrivilegeImpl implements AuthorizationPrivilege { + private AuthorizationPrivilege rangerHivePrivilege; + private Privilege.Condition condition; + + public RangerHDFSPrivilegeImpl( + AuthorizationPrivilege rangerHivePrivilege, Privilege.Condition condition) { + this.rangerHivePrivilege = rangerHivePrivilege; + this.condition = condition; + } + + @Override + public String getName() { + return rangerHivePrivilege.getName(); + } + + @Override + public Privilege.Condition condition() { + return condition; + } + + @Override + public boolean equalsTo(String value) { + return rangerHivePrivilege.equalsTo(value); + } + } + static List>> allRangerPrivileges = Lists.newArrayList( RangerHadoopSQLPrivilege.class, RangerPrivileges.RangerHdfsPrivilege.class); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java index 4062263222b..1413ec3037d 100644 --- 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java @@ -28,7 +28,7 @@ import org.apache.gravitino.authorization.SecurableObject; import org.apache.gravitino.authorization.SecurableObjects; import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin; -import org.apache.gravitino.authorization.ranger.RangerPathBaseMetadataObject; +import org.apache.gravitino.authorization.ranger.RangerHDFSMetadataObject; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; @@ -56,19 +56,19 @@ public void testTranslateMetadataObject() { MetadataObject metalake = MetadataObjects.parse(String.format("metalake1"), MetadataObject.Type.METALAKE); Assertions.assertEquals( - RangerPathBaseMetadataObject.Type.PATH, + RangerHDFSMetadataObject.Type.PATH, rangerAuthPlugin.translateMetadataObject(metalake).type()); MetadataObject catalog = MetadataObjects.parse(String.format("catalog1"), MetadataObject.Type.CATALOG); Assertions.assertEquals( - RangerPathBaseMetadataObject.Type.PATH, + RangerHDFSMetadataObject.Type.PATH, rangerAuthPlugin.translateMetadataObject(catalog).type()); MetadataObject schema = MetadataObjects.parse(String.format("catalog1.schema1"), MetadataObject.Type.SCHEMA); Assertions.assertEquals( - RangerPathBaseMetadataObject.Type.PATH, + RangerHDFSMetadataObject.Type.PATH, rangerAuthPlugin.translateMetadataObject(schema).type()); MetadataObject table = @@ -82,7 +82,7 @@ public void testTranslateMetadataObject() { AuthorizationMetadataObject rangerFileset = rangerAuthPlugin.translateMetadataObject(fileset); Assertions.assertEquals(1, rangerFileset.names().size()); Assertions.assertEquals("/test", rangerFileset.fullName()); - 
Assertions.assertEquals(RangerPathBaseMetadataObject.Type.PATH, rangerFileset.type()); + Assertions.assertEquals(RangerHDFSMetadataObject.Type.PATH, rangerFileset.type()); } @Test @@ -137,7 +137,7 @@ public void testTranslatePrivilege() { filesetInFileset1.forEach( securableObject -> { - Assertions.assertEquals(RangerPathBaseMetadataObject.Type.PATH, securableObject.type()); + Assertions.assertEquals(RangerHDFSMetadataObject.Type.PATH, securableObject.type()); Assertions.assertEquals("/test", securableObject.fullName()); Assertions.assertEquals(2, securableObject.privileges().size()); }); @@ -166,7 +166,7 @@ public void testTranslateOwner() { List filesetOwner = rangerAuthPlugin.translateOwner(fileset); Assertions.assertEquals(1, filesetOwner.size()); Assertions.assertEquals("/test", filesetOwner.get(0).fullName()); - Assertions.assertEquals(RangerPathBaseMetadataObject.Type.PATH, filesetOwner.get(0).type()); + Assertions.assertEquals(RangerHDFSMetadataObject.Type.PATH, filesetOwner.get(0).type()); Assertions.assertEquals(3, filesetOwner.get(0).privileges().size()); } } diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java index c7c9ec02f22..41535dabdad 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerBaseE2EIT.java @@ -61,7 +61,6 @@ public abstract class RangerBaseE2EIT extends BaseIT { protected static GravitinoMetalake metalake; protected static Catalog catalog; protected static String HIVE_METASTORE_URIS; - protected static String RANGER_ADMIN_URL = null; protected static SparkSession sparkSession = null; protected static final 
String HADOOP_USER_NAME = "HADOOP_USER_NAME"; @@ -102,13 +101,13 @@ public abstract class RangerBaseE2EIT extends BaseIT { protected static final String SQL_DROP_TABLE = String.format("DROP TABLE %s", tableName); - protected static void generateRangerSparkSecurityXML() throws IOException { + protected static void generateRangerSparkSecurityXML(String modeName) throws IOException { String templatePath = String.join( File.separator, System.getenv("GRAVITINO_ROOT_DIR"), "authorizations", - "authorization-ranger", + modeName, "src", "test", "resources", @@ -118,7 +117,7 @@ protected static void generateRangerSparkSecurityXML() throws IOException { File.separator, System.getenv("GRAVITINO_ROOT_DIR"), "authorizations", - "authorization-ranger", + modeName, "build", "resources", "test", @@ -128,7 +127,7 @@ protected static void generateRangerSparkSecurityXML() throws IOException { FileUtils.readFileToString(new File(templatePath), StandardCharsets.UTF_8); templateContext = templateContext - .replace("__REPLACE__RANGER_ADMIN_URL", RANGER_ADMIN_URL) + .replace("__REPLACE__RANGER_ADMIN_URL", RangerITEnv.RANGER_ADMIN_URL) .replace("__REPLACE__RANGER_HIVE_REPO_NAME", RangerITEnv.RANGER_HIVE_REPO_NAME); FileUtils.writeStringToFile(new File(xmlPath), templateContext, StandardCharsets.UTF_8); } @@ -204,7 +203,7 @@ protected static void waitForUpdatingPolicies() { protected abstract void testAlterTable(); @Test - protected void testCreateSchema() throws InterruptedException { + protected void testCreateSchema() throws InterruptedException, IOException { // Choose a catalog useCatalog(); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java index d8024afcc11..98058007e7c 100644 --- 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java @@ -43,9 +43,9 @@ import org.apache.gravitino.auth.AuthConstants; import org.apache.gravitino.auth.AuthenticatorType; import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.authorization.SecurableObject; import org.apache.gravitino.authorization.SecurableObjects; -import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; import org.apache.gravitino.authorization.ranger.RangerHelper; import org.apache.gravitino.authorization.ranger.RangerPrivileges; import org.apache.gravitino.client.GravitinoMetalake; diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java index 363f8f0b3a1..e7999ba4d5b 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java @@ -20,7 +20,6 @@ import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; -import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; @@ -29,7 +28,7 @@ import org.apache.gravitino.Configs; import org.apache.gravitino.auth.AuthConstants; import org.apache.gravitino.auth.AuthenticatorType; -import 
org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.catalog.hive.HiveConstants; import org.apache.gravitino.exceptions.UserAlreadyExistsException; import org.apache.gravitino.integration.test.container.HiveContainer; @@ -67,18 +66,13 @@ public void startIntegrationTest() throws Exception { RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); RangerITEnv.startHiveRangerContainer(); - RANGER_ADMIN_URL = - String.format( - "http://%s:%d", - containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT); - HIVE_METASTORE_URIS = String.format( "thrift://%s:%d", containerSuite.getHiveRangerContainer().getContainerIpAddress(), HiveContainer.HIVE_METASTORE_PORT); - generateRangerSparkSecurityXML(); + generateRangerSparkSecurityXML("authorization-ranger"); sparkSession = SparkSession.builder() @@ -186,7 +180,7 @@ public void createCatalog() { RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME, RangerAuthorizationProperties.RANGER_ADMIN_URL, - RANGER_ADMIN_URL, + RangerITEnv.RANGER_ADMIN_URL, RangerAuthorizationProperties.RANGER_AUTH_TYPE, RangerContainer.authType, RangerAuthorizationProperties.RANGER_USERNAME, diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java index 2efc1e9dd60..67c3db39a36 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java @@ -18,6 +18,7 @@ */ package org.apache.gravitino.authorization.ranger.integration.test; +import static 
org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT; import static org.mockito.Mockito.doReturn; import com.google.common.collect.ImmutableList; @@ -31,11 +32,11 @@ import java.util.stream.Collectors; import org.apache.gravitino.authorization.AuthorizationSecurableObject; import org.apache.gravitino.authorization.Privilege; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.authorization.Role; import org.apache.gravitino.authorization.ranger.RangerAuthorizationHDFSPlugin; import org.apache.gravitino.authorization.ranger.RangerAuthorizationHadoopSQLPlugin; import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin; -import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; import org.apache.gravitino.authorization.ranger.RangerHelper; import org.apache.gravitino.authorization.ranger.RangerPrivileges; import org.apache.gravitino.authorization.ranger.reference.RangerDefines; @@ -87,11 +88,15 @@ public class RangerITEnv { public static RangerAuthorizationPlugin rangerAuthHivePlugin; public static RangerAuthorizationPlugin rangerAuthHDFSPlugin; protected static RangerHelper rangerHelper; - protected static RangerHelper rangerHDFSHelper; + public static String RANGER_ADMIN_URL = null; public static void init(String metalakeName, boolean allowAnyoneAccessHDFS) { containerSuite.startRangerContainer(); + RANGER_ADMIN_URL = + String.format( + "http://%s:%d", + containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT); rangerClient = containerSuite.getRangerContainer().rangerClient; rangerAuthHivePlugin = @@ -134,7 +139,7 @@ public static void init(String metalakeName, boolean allowAnyoneAccessHDFS) { "HDFS", RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HDFS_REPO_NAME))); - doReturn("/test").when(spyRangerAuthorizationHDFSPlugin).getFileSetPath(Mockito.any()); + 
doReturn("/test").when(spyRangerAuthorizationHDFSPlugin).getLocationPath(Mockito.any()); rangerAuthHDFSPlugin = spyRangerAuthorizationHDFSPlugin; rangerHelper = diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java index 8f6f769504a..31039440090 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java @@ -21,7 +21,6 @@ import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName; import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; -import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -33,9 +32,9 @@ import org.apache.gravitino.auth.AuthConstants; import org.apache.gravitino.auth.AuthenticatorType; import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.authorization.SecurableObject; import org.apache.gravitino.authorization.SecurableObjects; -import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants; import org.apache.gravitino.integration.test.container.HiveContainer; import org.apache.gravitino.integration.test.container.RangerContainer; @@ -70,18 +69,13 @@ public void startIntegrationTest() throws Exception { RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); 
RangerITEnv.startHiveRangerContainer(); - RANGER_ADMIN_URL = - String.format( - "http://%s:%d", - containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT); - HIVE_METASTORE_URIS = String.format( "thrift://%s:%d", containerSuite.getHiveRangerContainer().getContainerIpAddress(), HiveContainer.HIVE_METASTORE_PORT); - generateRangerSparkSecurityXML(); + generateRangerSparkSecurityXML("authorization-ranger"); sparkSession = SparkSession.builder() @@ -179,7 +173,7 @@ public void createCatalog() { properties.put(RangerAuthorizationProperties.RANGER_SERVICE_TYPE, "HadoopSQL"); properties.put( RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME); - properties.put(RangerAuthorizationProperties.RANGER_ADMIN_URL, RANGER_ADMIN_URL); + properties.put(RangerAuthorizationProperties.RANGER_ADMIN_URL, RangerITEnv.RANGER_ADMIN_URL); properties.put(RangerAuthorizationProperties.RANGER_AUTH_TYPE, RangerContainer.authType); properties.put(RangerAuthorizationProperties.RANGER_USERNAME, RangerContainer.rangerUserName); properties.put(RangerAuthorizationProperties.RANGER_PASSWORD, RangerContainer.rangerPassword); diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java index 2773610048e..cb24489b704 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java @@ -20,7 +20,6 @@ import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName; -import static 
org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -32,9 +31,9 @@ import org.apache.gravitino.auth.AuthConstants; import org.apache.gravitino.auth.AuthenticatorType; import org.apache.gravitino.authorization.Privileges; +import org.apache.gravitino.authorization.RangerAuthorizationProperties; import org.apache.gravitino.authorization.SecurableObject; import org.apache.gravitino.authorization.SecurableObjects; -import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties; import org.apache.gravitino.integration.test.container.HiveContainer; import org.apache.gravitino.integration.test.container.RangerContainer; import org.apache.gravitino.integration.test.util.GravitinoITUtils; @@ -69,18 +68,13 @@ public void startIntegrationTest() throws Exception { RangerITEnv.init(RangerBaseE2EIT.metalakeName, true); RangerITEnv.startHiveRangerContainer(); - RANGER_ADMIN_URL = - String.format( - "http://%s:%d", - containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT); - HIVE_METASTORE_URIS = String.format( "thrift://%s:%d", containerSuite.getHiveRangerContainer().getContainerIpAddress(), HiveContainer.HIVE_METASTORE_PORT); - generateRangerSparkSecurityXML(); + generateRangerSparkSecurityXML("authorization-ranger"); sparkSession = SparkSession.builder() @@ -199,7 +193,7 @@ public void createCatalog() { RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME, RangerAuthorizationProperties.RANGER_ADMIN_URL, - RANGER_ADMIN_URL, + RangerITEnv.RANGER_ADMIN_URL, RangerAuthorizationProperties.RANGER_AUTH_TYPE, RangerContainer.authType, RangerAuthorizationProperties.RANGER_USERNAME, diff --git a/authorizations/build.gradle.kts b/authorizations/build.gradle.kts index 043fbfec673..354b36aae64 100644 --- a/authorizations/build.gradle.kts +++ b/authorizations/build.gradle.kts @@ -17,6 +17,18 @@ * 
under the License. */ -tasks.all { - enabled = false -} \ No newline at end of file +tasks { + test { + subprojects.forEach { + dependsOn(":${project.name}:${it.name}:test") + } + } + + register("copyLibAndConfig", Copy::class) { + subprojects.forEach { + if (!it.name.startsWith("authorization-common")) { + dependsOn(":${project.name}:${it.name}:copyLibAndConfig") + } + } + } +} diff --git a/build.gradle.kts b/build.gradle.kts index 5e93992e34e..c64997f3a90 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -583,7 +583,7 @@ tasks { val outputDir = projectDir.dir("distribution") val compileDistribution by registering { - dependsOn(":web:web:build", "copySubprojectDependencies", "copyCatalogLibAndConfigs", "copyAuthorizationLibAndConfigs", "copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs") + dependsOn(":web:web:build", "copySubprojectDependencies", "copyCatalogLibAndConfigs", ":authorizations:copyLibAndConfig", "copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs") group = "gravitino distribution" outputs.dir(projectDir.dir("distribution/package")) @@ -829,12 +829,6 @@ tasks { ) } - register("copyAuthorizationLibAndConfigs", Copy::class) { - dependsOn( - ":authorizations:authorization-ranger:copyLibAndConfig" - ) - } - clean { dependsOn(cleanDistribution) } diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java index 3d71948b744..36307f3ba4b 100644 --- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java +++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; -import java.lang.reflect.Field; import java.time.LocalDate; import 
java.util.Collections; import java.util.Map; @@ -423,21 +422,4 @@ private static void loadCatalogWithAnotherClient() { anotherCatalogWithNotExistingName = anotherClientWithNotExistingName.loadMetalake(METALAKE_NAME).loadCatalog(CATALOG_NAME); } - - public static void setEnv(String key, String value) { - try { - Map env = System.getenv(); - Class cl = env.getClass(); - Field field = cl.getDeclaredField("m"); - field.setAccessible(true); - Map writableEnv = (Map) field.get(env); - if (value == null) { - writableEnv.remove(key); - } else { - writableEnv.put(key, value); - } - } catch (Exception e) { - throw new IllegalStateException("Failed to set environment variable", e); - } - } } diff --git a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java index 2e77b8e162a..2bdba2d0761 100644 --- a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java +++ b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java @@ -58,7 +58,6 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.gravitino.Catalog; import org.apache.gravitino.CatalogChange; @@ -79,6 +78,7 @@ import org.apache.gravitino.connector.HasPropertyMetadata; import org.apache.gravitino.connector.PropertyEntry; import org.apache.gravitino.connector.SupportsSchemas; +import org.apache.gravitino.connector.authorization.BaseAuthorization; import org.apache.gravitino.connector.capability.Capability; import org.apache.gravitino.exceptions.CatalogAlreadyExistsException; import org.apache.gravitino.exceptions.CatalogInUseException; @@ -929,7 +929,7 @@ private IsolatedClassLoader createClassLoader(String provider, Map libAndResourcesPaths = Lists.newArrayList(catalogPkgPath, catalogConfPath); - 
buildAuthorizationPkgPath(conf).ifPresent(libAndResourcesPaths::add); + BaseAuthorization.buildAuthorizationPkgPath(conf).ifPresent(libAndResourcesPaths::add); return IsolatedClassLoader.buildClassLoader(libAndResourcesPaths); } else { // This will use the current class loader, it is mainly used for test. @@ -1046,37 +1046,6 @@ private String buildPkgPath(Map conf, String provider) { return pkgPath; } - private Optional buildAuthorizationPkgPath(Map conf) { - String gravitinoHome = System.getenv("GRAVITINO_HOME"); - Preconditions.checkArgument(gravitinoHome != null, "GRAVITINO_HOME not set"); - boolean testEnv = System.getenv("GRAVITINO_TEST") != null; - - String authorizationProvider = conf.get(Catalog.AUTHORIZATION_PROVIDER); - if (StringUtils.isBlank(authorizationProvider)) { - return Optional.empty(); - } - - String pkgPath; - if (testEnv) { - // In test, the authorization package is under the build directory. - pkgPath = - String.join( - File.separator, - gravitinoHome, - "authorizations", - "authorization-" + authorizationProvider, - "build", - "libs"); - } else { - // In real environment, the authorization package is under the authorization directory. 
- pkgPath = - String.join( - File.separator, gravitinoHome, "authorizations", authorizationProvider, "libs"); - } - - return Optional.of(pkgPath); - } - private Class lookupCatalogProvider(String provider, ClassLoader cl) { ServiceLoader loader = ServiceLoader.load(CatalogProvider.class, cl); diff --git a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java index 88fd47ab998..218c2a428b3 100644 --- a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java +++ b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java @@ -19,22 +19,16 @@ package org.apache.gravitino.connector; import com.google.common.base.Preconditions; -import com.google.common.collect.Iterables; import com.google.common.collect.Maps; -import com.google.common.collect.Streams; import java.io.Closeable; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.ServiceLoader; -import java.util.stream.Collectors; import org.apache.gravitino.Audit; import org.apache.gravitino.Catalog; import org.apache.gravitino.CatalogProvider; import org.apache.gravitino.annotation.Evolving; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -import org.apache.gravitino.connector.authorization.AuthorizationProvider; import org.apache.gravitino.connector.authorization.BaseAuthorization; import org.apache.gravitino.connector.capability.Capability; import org.apache.gravitino.meta.CatalogEntity; @@ -209,34 +203,7 @@ public void initAuthorizationPluginInstance(IsolatedClassLoader classLoader) { } try { BaseAuthorization authorization = - classLoader.withClassLoader( - cl -> { - try { - ServiceLoader loader = - ServiceLoader.load(AuthorizationProvider.class, cl); - - List> providers = - Streams.stream(loader.iterator()) - .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) - .map(AuthorizationProvider::getClass) - 
.collect(Collectors.toList()); - if (providers.isEmpty()) { - throw new IllegalArgumentException( - "No authorization provider found for: " + authorizationProvider); - } else if (providers.size() > 1) { - throw new IllegalArgumentException( - "Multiple authorization providers found for: " - + authorizationProvider); - } - return (BaseAuthorization) - Iterables.getOnlyElement(providers) - .getDeclaredConstructor() - .newInstance(); - } catch (Exception e) { - LOG.error("Failed to create authorization instance", e); - throw new RuntimeException(e); - } - }); + BaseAuthorization.createAuthorization(classLoader, authorizationProvider); authorizationPlugin = authorization.newPlugin(entity.namespace().level(0), provider(), this.conf); } catch (Exception e) { diff --git a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java index 173ad3527a8..cd97c475cfd 100644 --- a/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java +++ b/core/src/main/java/org/apache/gravitino/connector/authorization/BaseAuthorization.java @@ -18,9 +18,20 @@ */ package org.apache.gravitino.connector.authorization; +import com.google.common.base.Preconditions; +import com.google.common.collect.Iterables; +import com.google.common.collect.Streams; import java.io.Closeable; +import java.io.File; import java.io.IOException; +import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.ServiceLoader; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; +import org.apache.gravitino.Catalog; +import org.apache.gravitino.utils.IsolatedClassLoader; /** * The abstract base class for Authorization implementations.
@@ -46,4 +57,64 @@ public abstract AuthorizationPlugin newPlugin( @Override public void close() throws IOException {} + + public static BaseAuthorization createAuthorization( + IsolatedClassLoader classLoader, String authorizationProvider) throws Exception { + classLoader.withClassLoader( + cl -> { + try { + ServiceLoader loader = + ServiceLoader.load(AuthorizationProvider.class, cl); + + List> providers = + Streams.stream(loader.iterator()) + .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider)) + .map(AuthorizationProvider::getClass) + .collect(Collectors.toList()); + if (providers.isEmpty()) { + throw new IllegalArgumentException( + "No authorization provider found for: " + authorizationProvider); + } else if (providers.size() > 1) { + throw new IllegalArgumentException( + "Multiple authorization providers found for: " + authorizationProvider); + } + return (BaseAuthorization) + Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + return null; + } + + public static Optional buildAuthorizationPkgPath(Map conf) { + String gravitinoHome = System.getenv("GRAVITINO_HOME"); + Preconditions.checkArgument(gravitinoHome != null, "GRAVITINO_HOME not set"); + boolean testEnv = System.getenv("GRAVITINO_TEST") != null; + + String authorizationProvider = conf.get(Catalog.AUTHORIZATION_PROVIDER); + if (StringUtils.isBlank(authorizationProvider)) { + return Optional.empty(); + } + + String pkgPath; + if (testEnv) { + // In test, the authorization package is under the build directory. + pkgPath = + String.join( + File.separator, + gravitinoHome, + "authorizations", + "authorization-" + authorizationProvider, + "build", + "libs"); + } else { + // In real environment, the authorization package is under the authorization directory. 
+ pkgPath = + String.join( + File.separator, gravitinoHome, "authorizations", authorizationProvider, "libs"); + } + + return Optional.of(pkgPath); + } } diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java index 554ef0cec8b..4ee37b4ddec 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java @@ -24,8 +24,8 @@ import org.apache.gravitino.Catalog; import org.apache.gravitino.Namespace; import org.apache.gravitino.TestCatalog; -import org.apache.gravitino.connector.authorization.mysql.TestMySQLAuthorizationPlugin; -import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationPlugin; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHDFSPlugin; +import org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationHadoopSQLPlugin; import org.apache.gravitino.meta.AuditInfo; import org.apache.gravitino.meta.CatalogEntity; import org.apache.gravitino.utils.IsolatedClassLoader; @@ -35,7 +35,7 @@ public class TestAuthorization { private static TestCatalog hiveCatalog; - private static TestCatalog mySQLCatalog; + private static TestCatalog filesetCatalog; @BeforeAll public static void setUp() throws Exception { @@ -54,49 +54,59 @@ public static void setUp() throws Exception { hiveCatalog = new TestCatalog() - .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "ranger")) + .withCatalogConf( + ImmutableMap.of( + Catalog.AUTHORIZATION_PROVIDER, + "test-ranger", + "authorization.ranger.service.type", + "HadoopSQL")) .withCatalogEntity(hiveCatalogEntity); IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader( Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); 
hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader); - CatalogEntity mySQLEntity = + CatalogEntity filesetEntity = CatalogEntity.builder() .withId(2L) .withName("catalog-test2") .withNamespace(Namespace.of("default")) - .withType(Catalog.Type.RELATIONAL) + .withType(Catalog.Type.FILESET) .withProvider("test") .withAuditInfo(auditInfo) .build(); - mySQLCatalog = + filesetCatalog = new TestCatalog() - .withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER, "mysql")) - .withCatalogEntity(mySQLEntity); - mySQLCatalog.initAuthorizationPluginInstance(isolatedClassLoader); + .withCatalogConf( + ImmutableMap.of( + Catalog.AUTHORIZATION_PROVIDER, + "test-ranger", + "authorization.ranger.service.type", + "HDFS")) + .withCatalogEntity(filesetEntity); + filesetCatalog.initAuthorizationPluginInstance(isolatedClassLoader); } @Test - public void testRangerAuthorization() { - AuthorizationPlugin rangerAuthPlugin = hiveCatalog.getAuthorizationPlugin(); - Assertions.assertInstanceOf(TestRangerAuthorizationPlugin.class, rangerAuthPlugin); - TestRangerAuthorizationPlugin testRangerAuthPlugin = - (TestRangerAuthorizationPlugin) rangerAuthPlugin; - Assertions.assertFalse(testRangerAuthPlugin.callOnCreateRole1); - rangerAuthPlugin.onRoleCreated(null); - Assertions.assertTrue(testRangerAuthPlugin.callOnCreateRole1); + public void testRangerHadoopSQLAuthorization() { + AuthorizationPlugin rangerHiveAuthPlugin = hiveCatalog.getAuthorizationPlugin(); + Assertions.assertInstanceOf(TestRangerAuthorizationHadoopSQLPlugin.class, rangerHiveAuthPlugin); + TestRangerAuthorizationHadoopSQLPlugin testRangerAuthHadoopSQLPlugin = + (TestRangerAuthorizationHadoopSQLPlugin) rangerHiveAuthPlugin; + Assertions.assertFalse(testRangerAuthHadoopSQLPlugin.callOnCreateRole1); + rangerHiveAuthPlugin.onRoleCreated(null); + Assertions.assertTrue(testRangerAuthHadoopSQLPlugin.callOnCreateRole1); } @Test - public void testMySQLAuthorization() { - AuthorizationPlugin mySQLAuthPlugin = 
mySQLCatalog.getAuthorizationPlugin(); - Assertions.assertInstanceOf(TestMySQLAuthorizationPlugin.class, mySQLAuthPlugin); - TestMySQLAuthorizationPlugin testMySQLAuthPlugin = - (TestMySQLAuthorizationPlugin) mySQLAuthPlugin; - Assertions.assertFalse(testMySQLAuthPlugin.callOnCreateRole2); - mySQLAuthPlugin.onRoleCreated(null); - Assertions.assertTrue(testMySQLAuthPlugin.callOnCreateRole2); + public void testRangerHDFSAuthorization() { + AuthorizationPlugin rangerHDFSAuthPlugin = filesetCatalog.getAuthorizationPlugin(); + Assertions.assertInstanceOf(TestRangerAuthorizationHDFSPlugin.class, rangerHDFSAuthPlugin); + TestRangerAuthorizationHDFSPlugin testRangerAuthHDFSPlugin = + (TestRangerAuthorizationHDFSPlugin) rangerHDFSAuthPlugin; + Assertions.assertFalse(testRangerAuthHDFSPlugin.callOnCreateRole2); + rangerHDFSAuthPlugin.onRoleCreated(null); + Assertions.assertTrue(testRangerAuthHDFSPlugin.callOnCreateRole2); } } diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java index 9df9a8d63b7..1709c90319f 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorization.java @@ -18,6 +18,7 @@ */ package org.apache.gravitino.connector.authorization.ranger; +import com.google.common.base.Preconditions; import java.util.Map; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; import org.apache.gravitino.connector.authorization.BaseAuthorization; @@ -28,12 +29,23 @@ public TestRangerAuthorization() {} @Override public String shortName() { - return "ranger"; + return "test-ranger"; } @Override public AuthorizationPlugin newPlugin( - String metalake, String catalogProvider, Map config) { - return new TestRangerAuthorizationPlugin(); + String metalake, 
String catalogProvider, Map properties) { + Preconditions.checkArgument( + properties.containsKey("authorization.ranger.service.type"), + String.format("%s is required", "authorization.ranger.service.type")); + String serviceType = properties.get("authorization.ranger.service.type").toUpperCase(); + switch (serviceType) { + case "HADOOPSQL": + return new TestRangerAuthorizationHadoopSQLPlugin(); + case "HDFS": + return new TestRangerAuthorizationHDFSPlugin(); + default: + throw new IllegalArgumentException("Unsupported service type: " + serviceType); + } } } diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java similarity index 95% rename from core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java index e078eda410e..fdc28f8143e 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/mysql/TestMySQLAuthorizationPlugin.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHDFSPlugin.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.apache.gravitino.connector.authorization.mysql; +package org.apache.gravitino.connector.authorization.ranger; import java.io.IOException; import java.util.List; @@ -29,7 +29,7 @@ import org.apache.gravitino.authorization.User; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -public class TestMySQLAuthorizationPlugin implements AuthorizationPlugin { +public class TestRangerAuthorizationHDFSPlugin implements AuthorizationPlugin { public boolean callOnCreateRole2 = false; @Override diff --git a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java similarity index 97% rename from core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java rename to core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java index 8a68f825d0e..10dbe521e6c 100644 --- a/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationPlugin.java +++ b/core/src/test/java/org/apache/gravitino/connector/authorization/ranger/TestRangerAuthorizationHadoopSQLPlugin.java @@ -29,7 +29,7 @@ import org.apache.gravitino.authorization.User; import org.apache.gravitino.connector.authorization.AuthorizationPlugin; -public class TestRangerAuthorizationPlugin implements AuthorizationPlugin { +public class TestRangerAuthorizationHadoopSQLPlugin implements AuthorizationPlugin { public boolean callOnCreateRole1 = false; @Override diff --git a/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider b/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider index e49cb8937e0..b7219fdc279 100644 --- 
a/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider +++ b/core/src/test/resources/META-INF/services/org.apache.gravitino.connector.authorization.AuthorizationProvider @@ -16,5 +16,4 @@ # specific language governing permissions and limitations # under the License. # -org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorization -org.apache.gravitino.connector.authorization.mysql.TestMySQLAuthorization \ No newline at end of file +org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorization \ No newline at end of file diff --git a/integration-test-common/build.gradle.kts b/integration-test-common/build.gradle.kts index a25ad4cff8f..283169a76a9 100644 --- a/integration-test-common/build.gradle.kts +++ b/integration-test-common/build.gradle.kts @@ -54,7 +54,10 @@ dependencies { exclude("org.elasticsearch.client") exclude("org.elasticsearch.plugin") } - + testImplementation(libs.hadoop3.common) { + exclude("com.sun.jersey") + exclude("javax.servlet", "servlet-api") + } testImplementation(platform("org.junit:junit-bom:5.9.1")) testImplementation("org.junit.jupiter:junit-jupiter") } diff --git a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/BaseIT.java b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/BaseIT.java index fcf8ebb2b9c..091a60d9cdf 100644 --- a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/BaseIT.java +++ b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/BaseIT.java @@ -26,6 +26,7 @@ import com.google.common.base.Splitter; import java.io.File; import java.io.IOException; +import java.lang.reflect.Field; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -57,6 +58,7 @@ import org.apache.gravitino.server.GravitinoServer; import org.apache.gravitino.server.ServerConfig; import 
org.apache.gravitino.server.web.JettyServerConfig; +import org.apache.hadoop.security.UserGroupInformation; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestInstance; @@ -422,4 +424,37 @@ protected static void copyBundleJarsToHadoop(String bundleName) { String hadoopLibDirs = ITUtils.joinPath(gravitinoHome, "catalogs", "hadoop", "libs"); copyBundleJarsToDirectory(bundleName, hadoopLibDirs); } + + public static void runInEnv(String key, String value, Runnable lambda) { + String originalValue = System.getenv(key); + try { + setEnv(key, value); + if (key.equals("HADOOP_USER_NAME") && value != null) { + UserGroupInformation.setLoginUser(null); + System.setProperty("user.name", value); + } + lambda.run(); + } catch (Exception e) { + throw new IllegalStateException("Failed to set environment variable", e); + } finally { + setEnv(key, originalValue); + } + } + + public static void setEnv(String key, String value) { + try { + Map env = System.getenv(); + Class cl = env.getClass(); + Field field = cl.getDeclaredField("m"); + field.setAccessible(true); + Map writableEnv = (Map) field.get(env); + if (value == null) { + writableEnv.remove(key); + } else { + writableEnv.put(key, value); + } + } catch (Exception e) { + throw new IllegalStateException("Failed to set environment variable", e); + } + } } diff --git a/settings.gradle.kts b/settings.gradle.kts index 150acdb00ce..dc745f3497f 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -57,7 +57,7 @@ if (gradle.startParameter.projectProperties["enableFuse"]?.toBoolean() == true) } include("iceberg:iceberg-common") include("iceberg:iceberg-rest-server") -include("authorizations:authorization-ranger") +include("authorizations:authorization-common", "authorizations:authorization-ranger", "authorizations:authorization-chain") include("trino-connector:trino-connector", "trino-connector:integration-test") include("spark-connector:spark-common") // kyuubi hive 
connector doesn't support 2.13 for Spark3.3