Refactor ContextManagerTest (#31919)
* Refactor ContextManagerTest

* Refactor SingleRuleConfigurationDecorator
terrymanu authored Jun 29, 2024
1 parent 60ee706 commit e99d425
Showing 2 changed files with 47 additions and 46 deletions.
File: SingleRuleConfigurationDecorator.java

@@ -40,6 +40,7 @@
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

/**
* Single rule configuration decorator.
@@ -78,6 +79,35 @@ private boolean isExpandRequired(final Collection<String> splitTables) {
return splitTables.stream().anyMatch(each -> each.contains(SingleTableConstants.ASTERISK));
}

private void checkRuleConfiguration(final String databaseName, final Map<String, DataSource> dataSources, final Collection<String> excludedTables, final Collection<DataNode> dataNodes) {
for (DataNode each : dataNodes) {
if (!SingleTableConstants.ASTERISK.equals(each.getDataSourceName())) {
ShardingSpherePreconditions.checkContainsKey(dataSources, each.getDataSourceName(),
() -> new InvalidSingleRuleConfigurationException(String.format("Data source `%s` does not exist in database `%s`", each.getDataSourceName(), databaseName)));
}
ShardingSpherePreconditions.checkNotContains(excludedTables, each.getTableName(),
() -> new InvalidSingleRuleConfigurationException(String.format("Table `%s` existed and is not a single table in database `%s`", each.getTableName(), databaseName)));
}
}

private Collection<String> loadAllTables(final boolean isSchemaSupportedDatabaseType, final Map<String, Collection<DataNode>> actualDataNodes) {
return actualDataNodes.values().stream().map(each -> getTableNodeString(isSchemaSupportedDatabaseType, each.iterator().next())).collect(Collectors.toList());
}

private String getTableNodeString(final boolean isSchemaSupportedDatabaseType, final DataNode dataNode) {
return isSchemaSupportedDatabaseType
? formatTableName(dataNode.getDataSourceName(), dataNode.getSchemaName(), dataNode.getTableName())
: formatTableName(dataNode.getDataSourceName(), dataNode.getTableName());
}

private String formatTableName(final String dataSourceName, final String schemaName, final String tableName) {
return String.format("%s.%s.%s", dataSourceName, schemaName, tableName);
}

private String formatTableName(final String dataSourceName, final String tableName) {
return String.format("%s.%s", dataSourceName, tableName);
}

private Collection<String> loadSpecifiedTables(final boolean isSchemaSupportedDatabaseType, final Map<String, Collection<DataNode>> actualDataNodes,
final Collection<ShardingSphereRule> builtRules, final Collection<DataNode> configuredDataNodes) {
Collection<String> expandRequiredDataSources = new LinkedHashSet<>(configuredDataNodes.size(), 1F);
@@ -89,11 +119,10 @@ private Collection<String> loadSpecifiedTables(final boolean isSchemaSupportedDa
expectedDataNodes.put(each.getTableName(), each);
}
}
if (expandRequiredDataSources.isEmpty()) {
return loadSpecifiedTablesWithoutExpand(isSchemaSupportedDatabaseType, actualDataNodes, configuredDataNodes);
}
Collection<String> featureRequiredSingleTables = SingleTableLoadUtils.getFeatureRequiredSingleTables(builtRules);
return loadSpecifiedTablesWithExpand(isSchemaSupportedDatabaseType, actualDataNodes, featureRequiredSingleTables, expandRequiredDataSources, expectedDataNodes);
return expandRequiredDataSources.isEmpty()
? loadSpecifiedTablesWithoutExpand(isSchemaSupportedDatabaseType, actualDataNodes, configuredDataNodes)
: loadSpecifiedTablesWithExpand(
isSchemaSupportedDatabaseType, actualDataNodes, SingleTableLoadUtils.getFeatureRequiredSingleTables(builtRules), expandRequiredDataSources, expectedDataNodes);
}

private Collection<String> loadSpecifiedTablesWithExpand(final boolean isSchemaSupportedDatabaseType, final Map<String, Collection<DataNode>> actualDataNodes,
@@ -121,54 +150,20 @@ private Collection<String> loadSpecifiedTablesWithExpand(final boolean isSchemaS
return result;
}

private Collection<String> loadSpecifiedTablesWithoutExpand(final boolean isSchemaSupportedDatabaseType, final Map<String, Collection<DataNode>> actualDataNodes,
final Collection<DataNode> configuredDataNodes) {
private Collection<String> loadSpecifiedTablesWithoutExpand(final boolean isSchemaSupportedDatabaseType,
final Map<String, Collection<DataNode>> actualDataNodes, final Collection<DataNode> configuredDataNodes) {
Collection<String> result = new LinkedHashSet<>(configuredDataNodes.size(), 1F);
for (DataNode each : configuredDataNodes) {
ShardingSpherePreconditions.checkContainsKey(actualDataNodes, each.getTableName(), () -> new SingleTableNotFoundException(getTableNodeString(isSchemaSupportedDatabaseType, each)));
DataNode actualDataNode = actualDataNodes.get(each.getTableName()).iterator().next();
String tableNodeStr = getTableNodeString(isSchemaSupportedDatabaseType, actualDataNode);
ShardingSpherePreconditions.checkState(actualDataNode.equals(each),
() -> new InvalidSingleRuleConfigurationException(String.format("Single table '%s' is found that does not match %s", tableNodeStr,
getTableNodeString(isSchemaSupportedDatabaseType, each))));
ShardingSpherePreconditions.checkState(actualDataNode.equals(each), () -> new InvalidSingleRuleConfigurationException(
String.format("Single table '%s' is found that does not match %s", tableNodeStr, getTableNodeString(isSchemaSupportedDatabaseType, each))));
result.add(tableNodeStr);
}
return result;
}

private Collection<String> loadAllTables(final boolean isSchemaSupportedDatabaseType, final Map<String, Collection<DataNode>> actualDataNodes) {
Collection<String> result = new LinkedList<>();
for (Entry<String, Collection<DataNode>> entry : actualDataNodes.entrySet()) {
result.add(getTableNodeString(isSchemaSupportedDatabaseType, entry.getValue().iterator().next()));
}
return result;
}

private String getTableNodeString(final boolean isSchemaSupportedDatabaseType, final DataNode dataNode) {
return isSchemaSupportedDatabaseType
? formatTableName(dataNode.getDataSourceName(), dataNode.getSchemaName(), dataNode.getTableName())
: formatTableName(dataNode.getDataSourceName(), dataNode.getTableName());
}

private void checkRuleConfiguration(final String databaseName, final Map<String, DataSource> dataSources, final Collection<String> excludedTables, final Collection<DataNode> dataNodes) {
for (DataNode each : dataNodes) {
if (!SingleTableConstants.ASTERISK.equals(each.getDataSourceName())) {
ShardingSpherePreconditions.checkContainsKey(dataSources, each.getDataSourceName(),
() -> new InvalidSingleRuleConfigurationException(String.format("Data source `%s` does not exist in database `%s`", each.getDataSourceName(), databaseName)));
}
ShardingSpherePreconditions.checkNotContains(excludedTables, each.getTableName(),
() -> new InvalidSingleRuleConfigurationException(String.format("Table `%s` existed and is not a single table in database `%s`", each.getTableName(), databaseName)));
}
}

private String formatTableName(final String dataSourceName, final String tableName) {
return String.format("%s.%s", dataSourceName, tableName);
}

private String formatTableName(final String dataSourceName, final String schemaName, final String tableName) {
return String.format("%s.%s.%s", dataSourceName, schemaName, tableName);
}

@Override
public Class<SingleRuleConfiguration> getType() {
return SingleRuleConfiguration.class;
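
The change to SingleRuleConfigurationDecorator is essentially a behavior-preserving reshuffle: the private helpers (checkRuleConfiguration, getTableNodeString, formatTableName) move above their callers, the if/early-return in loadSpecifiedTables becomes a ternary, and the loop-based loadAllTables becomes a stream that collects with Collectors.toList(). The sketch below is illustrative only (Node is a hypothetical stand-in for DataNode, and the surrounding class and method names are invented for the example); it shows that the loop and stream forms of loadAllTables yield the same result.

// Illustrative only: a minimal, self-contained sketch of the loadAllTables refactor above.
// Node is a hypothetical stand-in for org.apache.shardingsphere.infra.datanode.DataNode,
// and the class and method names here are invented for the example.
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

public final class LoadAllTablesSketch {
    
    private static final class Node {
        
        private final String dataSourceName;
        
        private final String schemaName;
        
        private final String tableName;
        
        private Node(final String dataSourceName, final String schemaName, final String tableName) {
            this.dataSourceName = dataSourceName;
            this.schemaName = schemaName;
            this.tableName = tableName;
        }
    }
    
    // Old form: explicit loop over the entry set (the method removed further down in the diff).
    private static Collection<String> loadAllTablesWithLoop(final boolean schemaSupported, final Map<String, Collection<Node>> actualDataNodes) {
        Collection<String> result = new LinkedList<>();
        for (Entry<String, Collection<Node>> entry : actualDataNodes.entrySet()) {
            result.add(getTableNodeString(schemaSupported, entry.getValue().iterator().next()));
        }
        return result;
    }
    
    // New form: the same behavior expressed as a stream pipeline with Collectors.toList().
    private static Collection<String> loadAllTablesWithStream(final boolean schemaSupported, final Map<String, Collection<Node>> actualDataNodes) {
        return actualDataNodes.values().stream().map(each -> getTableNodeString(schemaSupported, each.iterator().next())).collect(Collectors.toList());
    }
    
    private static String getTableNodeString(final boolean schemaSupported, final Node node) {
        return schemaSupported ? String.format("%s.%s.%s", node.dataSourceName, node.schemaName, node.tableName) : String.format("%s.%s", node.dataSourceName, node.tableName);
    }
    
    public static void main(final String[] args) {
        Map<String, Collection<Node>> actualDataNodes = new LinkedHashMap<>();
        actualDataNodes.put("t_order", Arrays.asList(new Node("ds_0", "public", "t_order")));
        actualDataNodes.put("t_user", Arrays.asList(new Node("ds_1", "public", "t_user")));
        // Both variants print [ds_0.public.t_order, ds_1.public.t_user].
        System.out.println(loadAllTablesWithLoop(true, actualDataNodes));
        System.out.println(loadAllTablesWithStream(true, actualDataNodes));
    }
}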
File: ContextManagerTest.java

@@ -20,7 +20,6 @@
import org.apache.groovy.util.Maps;
import org.apache.shardingsphere.infra.config.mode.ModeConfiguration;
import org.apache.shardingsphere.infra.config.props.ConfigurationProperties;
import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey;
import org.apache.shardingsphere.infra.database.core.DefaultDatabase;
import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
import org.apache.shardingsphere.infra.datanode.DataNode;
@@ -49,6 +48,12 @@
import org.apache.shardingsphere.test.util.PropertiesBuilder.Property;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;

import java.sql.Types;
import java.util.Collections;
@@ -72,16 +77,17 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
class ContextManagerTest {

@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private MetaDataContexts metaDataContexts;

private ContextManager contextManager;

@BeforeEach
void setUp() {
metaDataContexts = mock(MetaDataContexts.class, RETURNS_DEEP_STUBS);
when(metaDataContexts.getMetaData().getProps().getValue(ConfigurationPropertyKey.KERNEL_EXECUTOR_SIZE)).thenReturn(1);
when(metaDataContexts.getMetaData().getProps()).thenReturn(new ConfigurationProperties(new Properties()));
ShardingSphereDatabase database = mockDatabase();
when(metaDataContexts.getMetaData().containsDatabase("foo_db")).thenReturn(true);
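
On the test side, ContextManagerTest drops the manual mock(MetaDataContexts.class, RETURNS_DEEP_STUBS) call and the now-unneeded KERNEL_EXECUTOR_SIZE stub from setUp(), and lets Mockito's JUnit 5 extension create the deep-stubbed mock instead: @ExtendWith(MockitoExtension.class) on the class, @Mock(answer = Answers.RETURNS_DEEP_STUBS) on the field, and @MockitoSettings(strictness = Strictness.LENIENT) so stubs that an individual test does not consume are not flagged as unnecessary. A minimal sketch of that pattern, using an invented FooMetaData interface and test class rather than the real ContextManager types:

// Illustrative only: a minimal sketch of the annotation-driven Mockito setup adopted above.
// FooMetaData and FooMetaDataTest are invented for the example; they are not ShardingSphere types.
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.when;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;

// Hypothetical collaborator with a nested call chain, standing in for MetaDataContexts.
interface FooMetaData {
    
    FooProps getProps();
    
    interface FooProps {
        
        int getExecutorSize();
    }
}

@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
class FooMetaDataTest {
    
    // Deep stubs allow stubbing the whole getProps().getExecutorSize() chain in one line,
    // which is what the refactored test relies on instead of wiring mocks by hand in setUp().
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private FooMetaData fooMetaData;
    
    @Test
    void assertExecutorSize() {
        when(fooMetaData.getProps().getExecutorSize()).thenReturn(1);
        assertEquals(1, fooMetaData.getProps().getExecutorSize());
    }
}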
