@@ -0,0 +1,103 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.datasource.credentials;

import org.apache.doris.datasource.property.metastore.MetastoreProperties;
import org.apache.doris.datasource.property.storage.StorageProperties;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public abstract class AbstractVendedCredentialsProvider {

private static final Logger LOG = LogManager.getLogger(AbstractVendedCredentialsProvider.class);

/**
* Get the storage properties map containing vended (temporary) credentials.
* This is the core method: the format conversion is done via StorageProperties.createAll().
*
* @param metastoreProperties Metastore properties
* @param tableObject Table object (generic, to support different data sources)
* @return Storage properties map containing the temporary credentials
*/
public final <T> Map<StorageProperties.Type, StorageProperties> getStoragePropertiesMapWithVendedCredentials(
MetastoreProperties metastoreProperties,
T tableObject) {

try {
if (!isVendedCredentialsEnabled(metastoreProperties) || tableObject == null) {
return null;
}

// 1. Extract the raw vended credentials from the table object (e.g. keys in oss.xxx / s3.xxx format)
Map<String, String> rawVendedCredentials = extractRawVendedCredentials(tableObject);
if (rawVendedCredentials == null || rawVendedCredentials.isEmpty()) {
return null;
}

// 2. Filter cloud storage properties before format conversion
Map<String, String> filteredCredentials =
CredentialUtils.filterCloudStorageProperties(rawVendedCredentials);
if (filteredCredentials.isEmpty()) {
return null;
}

// 3. Key step: format conversion via StorageProperties.createAll().
// This avoids duplicating the transformation logic in each vended-credentials class.
List<StorageProperties> vendedStorageProperties = StorageProperties.createAll(filteredCredentials);

// 4. Convert to Map format
Map<StorageProperties.Type, StorageProperties> vendedPropertiesMap = vendedStorageProperties.stream()
.collect(Collectors.toMap(StorageProperties::getType, Function.identity()));

if (LOG.isDebugEnabled()) {
LOG.debug("Successfully applied vended credentials for table: {}", getTableName(tableObject));
}
return vendedPropertiesMap;

} catch (Exception e) {
LOG.warn("Failed to get vended credentials, returning null", e);
// Return null on failure; the fallback is handled by VendedCredentialsFactory
return null;
}
}

/**
* Check whether vended credentials are enabled (implemented by subclasses)
*/
protected abstract boolean isVendedCredentialsEnabled(MetastoreProperties metastoreProperties);

/**
* Extract the raw vended credentials from the table object (implemented by subclasses).
* Returns properties in their original format; StorageProperties.createAll() is responsible
* for the conversion.
*/
protected abstract <T> Map<String, String> extractRawVendedCredentials(T tableObject);

/**
* Get the table name (used for logging; subclasses may override)
*/
protected <T> String getTableName(T tableObject) {
return tableObject != null ? tableObject.toString() : "null";
}

}
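Reviewer note (illustration only, not part of this diff): the class above is a template; concrete providers supply the two abstract hooks. A minimal hypothetical subclass might look like the sketch below — the class name and extraction logic are invented, while the real implementations in this PR are IcebergVendedCredentialsProvider and PaimonVendedCredentialsProvider.

// Hypothetical sketch for review purposes only; not Doris code.
// (Imports as in the file above, plus java.util.HashMap.)
public class ExampleVendedCredentialsProvider extends AbstractVendedCredentialsProvider {

    @Override
    protected boolean isVendedCredentialsEnabled(MetastoreProperties metastoreProperties) {
        // e.g. gate on a catalog-level switch carried in the metastore properties
        return metastoreProperties != null;
    }

    @Override
    protected <T> Map<String, String> extractRawVendedCredentials(T tableObject) {
        // e.g. copy prefixed temporary credentials ("s3.*", "oss.*") off the table object;
        // here the table object is assumed to already be a plain property map
        Map<String, String> raw = new HashMap<>();
        if (tableObject instanceof Map) {
            ((Map<?, ?>) tableObject).forEach((k, v) -> raw.put(String.valueOf(k), String.valueOf(v)));
        }
        return raw;
    }
}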

@@ -17,22 +17,70 @@

package org.apache.doris.datasource.credentials;

import org.apache.doris.datasource.property.storage.StorageProperties;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
* Utility class for Credential operations
*/
public class CredentialUtils {

/**
* Supported cloud storage prefixes for filtering vended credentials
*/
private static final Set<String> CLOUD_STORAGE_PREFIXES = new HashSet<>(Arrays.asList(
"fs.", // file system
"s3.", // Amazon S3
"oss.", // Alibaba OSS
"cos.", // Tencent COS
"obs.", // Huawei OBS
"gs.", // Google Cloud Storage
"azure." // Microsoft Azure
));

/**
* Filter cloud storage properties from raw vended credentials
* Only keeps properties with supported cloud storage prefixes
*
* @param rawVendedCredentials Raw vended credentials map
* @return Filtered cloud storage properties
*/
public static Map<String, String> filterCloudStorageProperties(Map<String, String> rawVendedCredentials) {
if (rawVendedCredentials == null || rawVendedCredentials.isEmpty()) {
return new HashMap<>();
}

Map<String, String> filtered = new HashMap<>();
rawVendedCredentials.entrySet().stream()
.filter(entry -> entry.getKey() != null && entry.getValue() != null)
.filter(entry -> CLOUD_STORAGE_PREFIXES.stream().anyMatch(prefix -> entry.getKey().startsWith(prefix)))
.forEach(entry -> filtered.put(entry.getKey(), entry.getValue()));

return filtered;
}

/**
* Extract backend properties from a StorageProperties map
* Reference: CatalogProperty.getBackendStorageProperties()
*
* @param storagePropertiesMap Map of storage properties
* @return Backend properties with null values filtered out
*/
public static Map<String, String> getBackendPropertiesFromStorageMap(
Map<StorageProperties.Type, StorageProperties> storagePropertiesMap) {
Map<String, String> result = new HashMap<>();
for (StorageProperties sp : storagePropertiesMap.values()) {
Map<String, String> backendProps = sp.getBackendConfigProperties();
// Backend property values must not be null because they are serialized to Thrift,
// which does not support null values.
backendProps.entrySet().stream().filter(e -> e.getValue() != null)
.forEach(e -> result.put(e.getKey(), e.getValue()));
}
return result;
}
}
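Reviewer note (behavioral sketch, not part of the change; the key names below are illustrative): filterCloudStorageProperties keeps only entries whose key carries one of the prefixes above and whose key and value are non-null, so a mixed property map is reduced to its cloud-storage subset.

Map<String, String> raw = new HashMap<>();
raw.put("s3.access_key", "temporary-key");        // kept: "s3." prefix
raw.put("oss.security_token", "temporary-token"); // kept: "oss." prefix
raw.put("warehouse", "s3://bucket/path");         // dropped: no recognized prefix
Map<String, String> filtered = CredentialUtils.filterCloudStorageProperties(raw);
// filtered now holds only the "s3." and "oss." entries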
@@ -0,0 +1,72 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.datasource.credentials;

import org.apache.doris.datasource.iceberg.IcebergVendedCredentialsProvider;
import org.apache.doris.datasource.paimon.PaimonVendedCredentialsProvider;
import org.apache.doris.datasource.property.metastore.MetastoreProperties;
import org.apache.doris.datasource.property.storage.StorageProperties;
import org.apache.doris.datasource.property.storage.StorageProperties.Type;

import java.util.Map;

public class VendedCredentialsFactory {
/**
* Core method: get the storage properties map containing vended credentials for a Scan/Sink node.
* It is called from the node's doInitialize() to ensure internal consistency.
*/
public static <T> Map<Type, StorageProperties> getStoragePropertiesMapWithVendedCredentials(
MetastoreProperties metastoreProperties,
Map<StorageProperties.Type, StorageProperties> baseStoragePropertiesMap,
T tableObject) {

AbstractVendedCredentialsProvider provider = getProvider(metastoreProperties);
if (provider != null) {
try {
Map<Type, StorageProperties> result = provider.getStoragePropertiesMapWithVendedCredentials(
metastoreProperties, tableObject);
return result != null ? result : baseStoragePropertiesMap;
} catch (Exception e) {
// Fall back to the base properties if the provider fails
return baseStoragePropertiesMap;
}
}

// Fall back to the base properties (no provider for this metastore type)
return baseStoragePropertiesMap;
}

/**
* Select the right provider according to the MetastoreProperties type
*/
private static AbstractVendedCredentialsProvider getProvider(MetastoreProperties metastoreProperties) {
if (metastoreProperties == null) {
return null;
}

MetastoreProperties.Type type = metastoreProperties.getType();
switch (type) {
case ICEBERG:
return IcebergVendedCredentialsProvider.getInstance();
case PAIMON:
return PaimonVendedCredentialsProvider.getInstance();
default:
// Other types do not support vended credentials
return null;
}
}
}
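Reviewer note (usage sketch; the variable names are illustrative, not taken from this PR): a scan/sink node passes its existing base map in and can use the result unconditionally, since every failure path degrades to the base map.

Map<StorageProperties.Type, StorageProperties> effective =
        VendedCredentialsFactory.getStoragePropertiesMapWithVendedCredentials(
                metastoreProperties,       // the catalog's metastore properties
                baseStoragePropertiesMap,  // long-lived catalog-level credentials
                tableObject);              // forwarded to the matching provider
// On a null result, an exception, or an unsupported metastore type, the base map comes back unchanged.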
